From e53b20a3bc4954bba9fe73e36f6aac92f875ee40 Mon Sep 17 00:00:00 2001 From: Andy Date: Wed, 8 Jan 2025 11:04:19 +0800 Subject: [PATCH] feat(openspg): version 0.6 (#451) --- .../builder/core/logical/LogicalPlan.java | 3 + .../builder/core/logical/PythonNode.java | 24 + .../builder/core/physical/PhysicalPlan.java | 3 + .../operator/PythonOperatorFactory.java | 90 +--- .../process/BuilderIndexProcessor.java | 6 +- .../process/ExtractPostProcessor.java | 55 ++- .../process/LLMNlExtractProcessor.java | 35 +- .../process/ParagraphSplitProcessor.java | 135 ++---- .../physical/process/PythonProcessor.java | 63 +++ .../physical/process/VectorizerProcessor.java | 26 +- .../core/physical/utils/CommonUtils.java | 150 ++++++ .../builder/core/runtime/BuilderContext.java | 1 - .../builder/model/BuilderConstants.java | 31 ++ .../builder/model/pipeline/ExecuteNode.java | 10 +- .../builder/model/pipeline/PipelineUtils.java | 97 ++++ .../model/pipeline/config/OperatorConfig.java | 24 +- .../config/ParagraphSplitNodeConfig.java | 18 +- .../pipeline/config/PythonNodeConfig.java | 29 ++ .../model/pipeline/enums/NodeTypeEnum.java | 1 + .../runner/local/LocalBuilderMain.java | 117 +++-- .../sink/impl/GraphStoreSinkWriter.java | 2 +- .../physical/sink/impl/Neo4jSinkWriter.java | 18 +- .../source/impl/StringSourceReader.java | 6 +- .../graphstore/neo4j/Neo4jStoreClient.java | 2 - cloudext/impl/object-storage/minio/pom.xml | 41 ++ .../impl/objectstorage/minio/MinioClient.java | 286 +++++++++++ .../minio/MinioClientDriver.java | 35 ++ .../objectstorage/minio/MinioConstants.java | 20 + cloudext/impl/object-storage/oss/pom.xml | 41 ++ .../impl/objectstorage/oss/OSSClient.java | 239 +++++++++ .../objectstorage/oss/OSSClientDriver.java | 35 ++ .../impl/objectstorage/oss/OSSConstants.java | 21 + .../searchengine/neo4j/Neo4jSearchClient.java | 7 +- cloudext/interface/computing-engine/pom.xml | 35 ++ .../ComputingEngineClient.java | 28 ++ .../ComputingEngineClientDriver.java | 18 + 
.../ComputingEngineClientDriverManager.java | 54 +++ .../ComputingEngineConstants.java | 18 + .../model/ComputingStatusEnum.java | 23 + .../computingengine/model/ComputingTask.java | 29 ++ ...omputingengine.ComputingEngineClientDriver | 1 + cloudext/interface/object-storage/pom.xml | 35 ++ .../objectstorage/ObjectStorageClient.java | 46 ++ .../ObjectStorageClientDriver.java | 18 + .../ObjectStorageClientDriverManager.java | 55 +++ ...es.objectstorage.ObjectStorageClientDriver | 2 + cloudext/pom.xml | 4 + common/util/pom.xml | 12 + .../common/constants/BuilderConstant.java | 64 +++ .../openspg/common/util/CommonUtils.java | 59 +++ .../common/util/DozerBeanMapperUtil.java | 56 +++ .../antgroup/openspg/common/util/ECBUtil.java | 85 ++++ .../openspg/common/util/PartitionUtils.java | 200 ++++++++ .../openspg/common/util/RetryerUtil.java | 77 +++ .../openspg/common/util/StringUtils.java | 22 +- .../common/util/constants/CommonConstant.java | 20 + .../util/converter/EnumToStringConverter.java | 64 +++ .../LocalDateTimeToDateDozerConverter.java | 37 ++ ...DateTimeToLocalDateTimeDozerConverter.java | 34 ++ .../LocalDateToLocalDateDozerConverter.java | 33 ++ .../LocalTimeToLocalTimeDozerConverter.java | 33 ++ .../common/util/neo4j/Neo4jDriverManager.java | 2 +- .../common/util/neo4j/Neo4jIndexUtils.java | 6 +- .../openspg/common/util/pemja/PemjaUtils.java | 87 ++++ .../common/util/pemja/PythonInvokeMethod.java | 59 +++ .../common/util/pemja/model/PemjaConfig.java | 84 ++++ .../main/resources/dozer-custom-convert.xml | 92 ++++ dev/release/mysql/buildx-release-mysql.sh | 2 +- dev/release/mysql/sql/initdb.sql | 118 +++++ dev/release/mysql/sql/openspg-initdb.sql | 80 +++ dev/release/python/Dockerfile | 9 +- .../python/build-release-python-aliyun.sh | 4 +- dev/release/server/buildx-release-server.sh | 2 +- pom.xml | 47 +- .../reasoner/common/constants/Constants.java | 4 + .../reasoner/runner/local/rdg/LocalRDG.java | 1 - .../reasoner/rdg/common/LinkEdgeImpl.java | 24 +- 
.../udf/builtin/udf/JsonStringGet.java | 3 - .../reasoner/udf/model/LinkedUdtfResult.java | 3 + .../openspg/reasoner/udf/utils/DateUtils.java | 6 + .../openspg/server/api/facade/Paged.java | 7 + .../dto/common/request/ConfigRequest.java | 31 ++ .../dto/common/request/DataQueryRequest.java | 29 ++ .../common/request/DataReasonerRequest.java | 26 + .../dto/common/request/PermissionRequest.java | 32 ++ .../common/request/ProjectCreateRequest.java | 60 +-- .../common/request/ProjectQueryRequest.java | 31 ++ .../service/request/TextSearchRequest.java | 1 + .../service/request/WriterGraphRequest.java | 13 +- server/api/http-client/pom.xml | 6 + .../http/client/account/AccountService.java | 115 +++++ server/api/http-server/pom.xml | 4 + .../api/http/server/HttpBizTemplate.java | 4 + .../server/openapi/DataSourceController.java | 259 ++++++++++ .../http/server/openapi/GraphController.java | 24 +- .../server/openapi/ProjectController.java | 33 +- .../server/openapi/SchedulerController.java | 316 ++++++++++++ .../config/application-default.properties | 6 +- server/biz/common/pom.xml | 8 + .../server/biz/common/AccountManager.java | 111 +++++ .../server/biz/common/ConfigManager.java | 92 ++++ .../server/biz/common/PermissionManager.java | 114 +++++ .../server/biz/common/ProjectManager.java | 8 + .../biz/common/impl/AccountManagerImpl.java | 164 +++++++ .../biz/common/impl/ConfigManagerImpl.java | 388 +++++++++++++++ .../common/impl/PermissionManagerImpl.java | 229 +++++++++ .../biz/common/impl/ProjectManagerImpl.java | 84 +++- .../server/biz/schema/SchemaManager.java | 28 ++ .../biz/schema/impl/SchemaManagerImpl.java | 19 + .../biz/service/impl/ReasonerManagerImpl.java | 3 + .../biz/service/impl/SearchManagerImpl.java | 15 +- server/common/model/pom.xml | 4 + .../server/common/model/CommonConstants.java | 1 + .../server/common/model/CommonEnum.java | 104 ++++ .../server/common/model/account/Account.java | 85 ++++ .../common/model/bulider/BuilderJob.java | 45 ++ 
.../common/model/bulider/BuilderJobQuery.java | 41 ++ .../server/common/model/config/Config.java | 74 +++ .../server/common/model/data/DataRecord.java | 32 ++ .../common/model/data/EntitySampleData.java | 31 ++ .../common/model/datasource/Column.java | 36 ++ .../common/model/datasource/DataSource.java | 54 +++ .../model/datasource/DataSourceQuery.java | 39 ++ .../server/common/model/job/SubGraph.java | 1 + .../common/model/permission/Permission.java | 45 ++ .../common/model/project/AccountRoleInfo.java | 34 ++ .../common/model/scheduler/SchedulerEnum.java | 9 +- server/common/service/pom.xml | 82 ++++ .../service/account/AccountRepository.java | 108 +++++ .../service/builder/BuilderJobRepository.java | 35 ++ .../service/builder/BuilderJobService.java | 35 ++ .../builder/impl/BuilderJobServiceImpl.java | 54 +++ .../service/config/ConfigRepository.java | 53 ++ .../common/service/config/DefaultValue.java | 52 ++ .../datasource/DataSourceRepository.java | 39 ++ .../service/datasource/DataSourceService.java | 50 ++ .../impl/DataSourceServiceImpl.java | 130 +++++ .../datasource/meta/DataSourceMeta.java | 183 +++++++ .../meta/client/CloudDataSource.java | 60 +++ .../meta/client/DataSourceMetaClient.java | 115 +++++ .../meta/client/DataSourceMetaFactory.java | 45 ++ .../client/impl/DefaultMetaClientImpl.java | 82 ++++ .../meta/client/impl/JdbcClient.java | 84 ++++ .../meta/client/impl/JdbcMetaClientImpl.java | 400 +++++++++++++++ .../meta/client/impl/OdpsClient.java | 235 +++++++++ .../meta/client/impl/OdpsMetaClientImpl.java | 341 +++++++++++++ .../permission/PermissionRepository.java | 133 +++++ .../service/project/ProjectRepository.java | 2 + .../service/spring/SpringContextHolder.java | 8 + .../core/reasoner/service/impl/Utils.java | 16 +- .../core/reasoner/service/udtf/RdfExpand.java | 6 +- .../model/query/SchedulerInfoQuery.java | 40 ++ .../model/query/SchedulerInstanceQuery.java | 40 ++ .../model/query/SchedulerJobQuery.java | 40 ++ 
.../model/query/SchedulerTaskQuery.java | 40 ++ .../model/service/SchedulerHandlerResult.java | 36 ++ .../model/service/SchedulerInfo.java | 78 +++ .../model/service/SchedulerInfoLog.java | 52 ++ .../model/service/SchedulerInstance.java | 3 - .../model/service/SchedulerTask.java | 12 +- .../scheduler/model/task/TaskExecuteDag.java | 48 ++ server/core/scheduler/service/pom.xml | 24 + .../service/api/SchedulerService.java | 13 +- .../api/impl/SchedulerServiceImpl.java | 15 +- .../service/common/MemoryTaskServer.java | 143 ++++++ .../common/SchedulerCommonService.java | 71 ++- .../service/config/SchedulerConfig.java | 41 +- .../impl/SchedulerExecuteServiceImpl.java | 16 +- .../service/handler/SchedulerHandler.java | 10 +- .../client/db/SchedulerHandlerClient.java | 296 ++++++++++++ .../local/LocalSchedulerHandlerClient.java | 90 ++++ .../impl/ExecuteInstanceScheduleHandler.java | 49 ++ .../impl/GenerateInstanceScheduleHandler.java | 51 ++ .../impl/local/LocalSchedulerHandler.java | 92 ---- .../metadata/SchedulerInfoService.java | 45 ++ .../metadata/SchedulerInstanceService.java | 8 +- .../service/metadata/SchedulerJobService.java | 5 +- .../metadata/SchedulerTaskService.java | 8 +- .../impl/db/SchedulerInfoServiceImpl.java | 70 +++ .../impl/db/SchedulerInstanceServiceImpl.java | 78 +++ .../impl/db/SchedulerJobServiceImpl.java | 55 +++ .../impl/db/SchedulerTaskServiceImpl.java | 91 ++++ .../local/LocalSchedulerInfoServiceImpl.java | 133 +++++ .../LocalSchedulerInstanceServiceImpl.java | 16 +- .../local/LocalSchedulerJobServiceImpl.java | 8 +- .../local/LocalSchedulerTaskServiceImpl.java | 12 +- .../repository/SchedulerInfoRepository.java | 49 ++ .../SchedulerInstanceRepository.java | 47 ++ .../repository/SchedulerJobRepository.java | 40 ++ .../repository/SchedulerTaskRepository.java | 57 +++ .../service/task/TaskExecuteTemplate.java | 10 +- .../builder/ComputingEngineAsyncTask.java | 212 ++++++++ .../async/builder/KagAlignmentAsyncTask.java | 221 +++++++++ 
.../async/builder/KagExtractorAsyncTask.java | 280 +++++++++++ .../async/builder/KagSplitterAsyncTask.java | 228 +++++++++ .../async/builder/KagVectorizerAsyncTask.java | 225 +++++++++ .../async/builder/KagWriterAsyncTask.java | 246 ++++++++++ .../task/sync/builder/KagReaderSyncTask.java | 88 ++++ .../service/translate/Translate.java | 11 + .../builder/KagBuilderTranslate.java | 147 ++++++ .../schema/model/predicate/IndexTypeEnum.java | 25 +- .../service/predicate/RelationService.java | 8 + .../predicate/impl/RelationServiceImpl.java | 42 ++ .../schema/service/type/SPGTypeService.java | 20 + .../service/type/impl/SPGTypeServiceImpl.java | 17 + server/infra/dao/pom.xml | 8 + .../infra/dao/dataobject/AccountDO.java | 72 +++ .../infra/dao/dataobject/BuilderJobDO.java | 41 ++ .../server/infra/dao/dataobject/ConfigDO.java | 60 +++ .../infra/dao/dataobject/DataSourceDO.java | 40 ++ .../infra/dao/dataobject/PermissionDO.java | 50 ++ .../infra/dao/dataobject/SchedulerInfoDO.java | 52 ++ .../dao/dataobject/SchedulerInstanceDO.java | 76 +++ .../infra/dao/dataobject/SchedulerJobDO.java | 71 +++ .../infra/dao/dataobject/SchedulerTaskDO.java | 82 ++++ .../infra/dao/mapper/AccountMapper.java | 112 +++++ .../infra/dao/mapper/BuilderJobDOMapper.java | 33 ++ .../server/infra/dao/mapper/ConfigMapper.java | 113 +++++ .../infra/dao/mapper/DataSourceDOMapper.java | 35 ++ .../infra/dao/mapper/PermissionMapper.java | 277 +++++++++++ .../infra/dao/mapper/ProjectDOMapper.java | 5 +- .../dao/mapper/SchedulerInfoDOMapper.java | 47 ++ .../dao/mapper/SchedulerInstanceDOMapper.java | 44 ++ .../dao/mapper/SchedulerJobDOMapper.java | 33 ++ .../dao/mapper/SchedulerTaskDOMapper.java | 55 +++ .../common/AccountRepositoryImpl.java | 127 +++++ .../common/BuilderJobRepositoryImpl.java | 69 +++ .../common/ConfigRepositoryImpl.java | 64 +++ .../common/DataSourceRepositoryImpl.java | 75 +++ .../common/PermissionRepositoryImpl.java | 212 ++++++++ .../common/ProjectRepositoryImpl.java | 26 +- 
.../common/convertor/AccountConvertor.java | 86 ++++ .../common/convertor/BuilderJobConvertor.java | 62 +++ .../common/convertor/ConfigConvertor.java | 47 ++ .../common/convertor/DataSourceConvertor.java | 72 +++ .../common/convertor/PermissionConvertor.java | 45 ++ .../SchedulerInfoRepositoryImpl.java | 84 ++++ .../SchedulerInstanceRepositoryImpl.java | 80 +++ .../scheduler/SchedulerJobRepositoryImpl.java | 69 +++ .../SchedulerTaskRepositoryImpl.java | 97 ++++ .../convertor/SchedulerInfoConvertor.java | 79 +++ .../convertor/SchedulerInstanceConvertor.java | 80 +++ .../convertor/SchedulerJobConvertor.java | 70 +++ .../convertor/SchedulerTaskConvertor.java | 70 +++ .../main/resources/mapper/AccountMapper.xml | 457 ++++++++++++++++++ .../resources/mapper/BuilderJobDOMapper.xml | 200 ++++++++ .../main/resources/mapper/ConfigMapper.xml | 267 ++++++++++ .../resources/mapper/DataSourceDOMapper.xml | 194 ++++++++ .../resources/mapper/PermissionMapper.xml | 436 +++++++++++++++++ .../main/resources/mapper/ProjectDOMapper.xml | 34 +- .../mapper/SchedulerInfoDOMapper.xml | 146 ++++++ .../mapper/SchedulerInstanceDOMapper.xml | 216 +++++++++ .../resources/mapper/SchedulerJobDOMapper.xml | 179 +++++++ .../mapper/SchedulerTaskDOMapper.xml | 226 +++++++++ .../resources/mybatis-generator-config.xml | 16 + server/pom.xml | 5 + .../test/kgschema/SPGSchemaFacadeTest.groovy | 202 -------- .../scheduler/SchedulerServiceImplTest.java | 60 +-- .../translate/LocalExampleTranslateMock.java | 8 +- .../resources/config/application.properties | 5 +- 260 files changed, 17115 insertions(+), 867 deletions(-) create mode 100644 builder/core/src/main/java/com/antgroup/openspg/builder/core/logical/PythonNode.java create mode 100644 builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/PythonProcessor.java create mode 100644 builder/model/src/main/java/com/antgroup/openspg/builder/model/BuilderConstants.java create mode 100644 
builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/PipelineUtils.java create mode 100644 builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/PythonNodeConfig.java create mode 100644 cloudext/impl/object-storage/minio/pom.xml create mode 100644 cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioClient.java create mode 100644 cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioClientDriver.java create mode 100644 cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioConstants.java create mode 100644 cloudext/impl/object-storage/oss/pom.xml create mode 100644 cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSClient.java create mode 100644 cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSClientDriver.java create mode 100644 cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSConstants.java create mode 100644 cloudext/interface/computing-engine/pom.xml create mode 100644 cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineClient.java create mode 100644 cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineClientDriver.java create mode 100644 cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineClientDriverManager.java create mode 100644 cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineConstants.java create mode 100644 
cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/model/ComputingStatusEnum.java create mode 100644 cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/model/ComputingTask.java create mode 100644 cloudext/interface/computing-engine/src/main/resources/META-INF/services/com.antgroup.openspg.cloudext.interfaces.computingengine.ComputingEngineClientDriver create mode 100644 cloudext/interface/object-storage/pom.xml create mode 100644 cloudext/interface/object-storage/src/main/java/com/antgroup/openspg/cloudext/interfaces/objectstorage/ObjectStorageClient.java create mode 100644 cloudext/interface/object-storage/src/main/java/com/antgroup/openspg/cloudext/interfaces/objectstorage/ObjectStorageClientDriver.java create mode 100644 cloudext/interface/object-storage/src/main/java/com/antgroup/openspg/cloudext/interfaces/objectstorage/ObjectStorageClientDriverManager.java create mode 100644 cloudext/interface/object-storage/src/main/resources/META-INF/services/com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriver create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/constants/BuilderConstant.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/CommonUtils.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/DozerBeanMapperUtil.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/ECBUtil.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/PartitionUtils.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/RetryerUtil.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/constants/CommonConstant.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/converter/EnumToStringConverter.java create mode 100644 
common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateTimeToDateDozerConverter.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateTimeToLocalDateTimeDozerConverter.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateToLocalDateDozerConverter.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalTimeToLocalTimeDozerConverter.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PemjaUtils.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PythonInvokeMethod.java create mode 100644 common/util/src/main/java/com/antgroup/openspg/common/util/pemja/model/PemjaConfig.java create mode 100644 common/util/src/main/resources/dozer-custom-convert.xml create mode 100644 server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ConfigRequest.java create mode 100644 server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/DataQueryRequest.java create mode 100644 server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/DataReasonerRequest.java create mode 100644 server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/PermissionRequest.java create mode 100644 server/api/http-client/src/main/java/com/antgroup/openspg/server/api/http/client/account/AccountService.java create mode 100644 server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/DataSourceController.java create mode 100644 server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/SchedulerController.java create mode 100644 server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/AccountManager.java create mode 100644 
server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/ConfigManager.java create mode 100644 server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/PermissionManager.java create mode 100644 server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/impl/AccountManagerImpl.java create mode 100644 server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/impl/ConfigManagerImpl.java create mode 100644 server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/impl/PermissionManagerImpl.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/CommonEnum.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/account/Account.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/bulider/BuilderJob.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/bulider/BuilderJobQuery.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/config/Config.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/data/DataRecord.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/data/EntitySampleData.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/datasource/Column.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/datasource/DataSource.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/datasource/DataSourceQuery.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/permission/Permission.java create mode 100644 server/common/model/src/main/java/com/antgroup/openspg/server/common/model/project/AccountRoleInfo.java create mode 
100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/account/AccountRepository.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/builder/BuilderJobRepository.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/builder/BuilderJobService.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/builder/impl/BuilderJobServiceImpl.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/config/ConfigRepository.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/config/DefaultValue.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/DataSourceRepository.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/DataSourceService.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/impl/DataSourceServiceImpl.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/DataSourceMeta.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/CloudDataSource.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/DataSourceMetaClient.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/DataSourceMetaFactory.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/DefaultMetaClientImpl.java create mode 100644 
server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/JdbcClient.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/JdbcMetaClientImpl.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/OdpsClient.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/OdpsMetaClientImpl.java create mode 100644 server/common/service/src/main/java/com/antgroup/openspg/server/common/service/permission/PermissionRepository.java create mode 100644 server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerInfoQuery.java create mode 100644 server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerInstanceQuery.java create mode 100644 server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerJobQuery.java create mode 100644 server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerTaskQuery.java create mode 100644 server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerHandlerResult.java create mode 100644 server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInfo.java create mode 100644 server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInfoLog.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/common/MemoryTaskServer.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/client/db/SchedulerHandlerClient.java create 
mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/client/local/LocalSchedulerHandlerClient.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/ExecuteInstanceScheduleHandler.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/GenerateInstanceScheduleHandler.java delete mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/local/LocalSchedulerHandler.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerInfoService.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerInfoServiceImpl.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerInstanceServiceImpl.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerJobServiceImpl.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerTaskServiceImpl.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerInfoServiceImpl.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerInfoRepository.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerInstanceRepository.java create mode 100644 
server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerJobRepository.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerTaskRepository.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/ComputingEngineAsyncTask.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagAlignmentAsyncTask.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagExtractorAsyncTask.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagSplitterAsyncTask.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagVectorizerAsyncTask.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagWriterAsyncTask.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/sync/builder/KagReaderSyncTask.java create mode 100644 server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/translate/builder/KagBuilderTranslate.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/AccountDO.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/BuilderJobDO.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/ConfigDO.java create mode 100644 
server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/DataSourceDO.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/PermissionDO.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/SchedulerInfoDO.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/SchedulerInstanceDO.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/SchedulerJobDO.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/SchedulerTaskDO.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/AccountMapper.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/BuilderJobDOMapper.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/ConfigMapper.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/DataSourceDOMapper.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/PermissionMapper.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerInfoDOMapper.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerInstanceDOMapper.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerJobDOMapper.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerTaskDOMapper.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/AccountRepositoryImpl.java create mode 100644 
server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/BuilderJobRepositoryImpl.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/ConfigRepositoryImpl.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/DataSourceRepositoryImpl.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/PermissionRepositoryImpl.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/AccountConvertor.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/BuilderJobConvertor.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/ConfigConvertor.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/DataSourceConvertor.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/PermissionConvertor.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerInfoRepositoryImpl.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerInstanceRepositoryImpl.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerJobRepositoryImpl.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerTaskRepositoryImpl.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerInfoConvertor.java create mode 100644 
server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerInstanceConvertor.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerJobConvertor.java create mode 100644 server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerTaskConvertor.java create mode 100644 server/infra/dao/src/main/resources/mapper/AccountMapper.xml create mode 100644 server/infra/dao/src/main/resources/mapper/BuilderJobDOMapper.xml create mode 100644 server/infra/dao/src/main/resources/mapper/ConfigMapper.xml create mode 100644 server/infra/dao/src/main/resources/mapper/DataSourceDOMapper.xml create mode 100644 server/infra/dao/src/main/resources/mapper/PermissionMapper.xml create mode 100644 server/infra/dao/src/main/resources/mapper/SchedulerInfoDOMapper.xml create mode 100644 server/infra/dao/src/main/resources/mapper/SchedulerInstanceDOMapper.xml create mode 100644 server/infra/dao/src/main/resources/mapper/SchedulerJobDOMapper.xml create mode 100644 server/infra/dao/src/main/resources/mapper/SchedulerTaskDOMapper.xml delete mode 100644 server/test/src/test/java/com/antgroup/openspg/test/kgschema/SPGSchemaFacadeTest.groovy diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/logical/LogicalPlan.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/logical/LogicalPlan.java index d4efee44f..07ef83fee 100644 --- a/builder/core/src/main/java/com/antgroup/openspg/builder/core/logical/LogicalPlan.java +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/logical/LogicalPlan.java @@ -92,6 +92,9 @@ private static BaseLogicalNode parse(Node node) { case BUILDER_INDEX: return new BuilderIndexNode( node.getId(), node.getName(), (BuilderIndexNodeConfig) node.getNodeConfig()); + case PYTHON: + return new PythonNode( + node.getId(), node.getName(), (PythonNodeConfig) 
node.getNodeConfig()); case PARAGRAPH_SPLIT: return new ParagraphSplitNode( node.getId(), node.getName(), (ParagraphSplitNodeConfig) node.getNodeConfig()); diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/logical/PythonNode.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/logical/PythonNode.java new file mode 100644 index 000000000..820d37185 --- /dev/null +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/logical/PythonNode.java @@ -0,0 +1,24 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.builder.core.logical; + +import com.antgroup.openspg.builder.model.pipeline.config.PythonNodeConfig; +import com.antgroup.openspg.builder.model.pipeline.enums.NodeTypeEnum; + +public class PythonNode extends BaseLogicalNode { + + public PythonNode(String id, String name, PythonNodeConfig nodeConfig) { + super(id, name, NodeTypeEnum.PYTHON, nodeConfig); + } +} diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/PhysicalPlan.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/PhysicalPlan.java index fbbfddc9d..93c7723c9 100644 --- a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/PhysicalPlan.java +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/PhysicalPlan.java @@ -97,6 +97,9 @@ private static BaseProcessor parse(BaseLogicalNode node) { case BUILDER_INDEX: return new BuilderIndexProcessor( node.getId(), node.getName(), (BuilderIndexNodeConfig) node.getNodeConfig()); + case PYTHON: + return new PythonProcessor( + node.getId(), node.getName(), (PythonNodeConfig) node.getNodeConfig()); case LLM_NL_EXTRACT: return new LLMNlExtractProcessor( node.getId(), node.getName(), (LLMNlExtractNodeConfig) node.getNodeConfig()); diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/operator/PythonOperatorFactory.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/operator/PythonOperatorFactory.java index 16131c644..b8b0fe790 100644 --- a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/operator/PythonOperatorFactory.java +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/operator/PythonOperatorFactory.java @@ -15,20 +15,17 @@ import com.antgroup.openspg.builder.core.runtime.BuilderContext; import com.antgroup.openspg.builder.model.pipeline.config.OperatorConfig; -import java.util.*; -import java.util.stream.Collectors; +import 
com.antgroup.openspg.common.util.pemja.PemjaUtils; +import com.antgroup.openspg.common.util.pemja.model.PemjaConfig; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.collections4.MapUtils; -import org.apache.commons.lang3.StringUtils; -import pemja.core.PythonInterpreter; -import pemja.core.PythonInterpreterConfig; @Slf4j public class PythonOperatorFactory implements OperatorFactory { private String pythonExec; - private String[] pythonPaths; - private String pythonKnextPath; + private String pythonPaths; + private String hostAddr; + private Long projectId; private PythonOperatorFactory() {} @@ -36,31 +33,13 @@ public static OperatorFactory getInstance() { return new PythonOperatorFactory(); } - private PythonInterpreter newPythonInterpreter() { - - PythonInterpreterConfig.PythonInterpreterConfigBuilder builder = - PythonInterpreterConfig.newBuilder(); - if (pythonExec != null) { - builder.setPythonExec(pythonExec); - } - if (pythonPaths != null) { - builder.addPythonPaths(pythonPaths); - } - return new PythonInterpreter(builder.build()); - } - @Override public void init(BuilderContext context) { pythonExec = context.getPythonExec(); - pythonPaths = (context.getPythonPaths() != null ? context.getPythonPaths().split(";") : null); - pythonKnextPath = context.getPythonKnextPath(); - log.info("pythonExec={}, pythonPaths={}", pythonExec, Arrays.toString(pythonPaths)); - } - - public PythonInterpreter getPythonInterpreter(OperatorConfig config) { - PythonInterpreter interpreter = newPythonInterpreter(); - loadOperatorObject(config, interpreter); - return interpreter; + pythonPaths = context.getPythonPaths(); + hostAddr = context.getSchemaUrl(); + projectId = context.getProjectId(); + log.info("pythonExec={}, pythonPaths={}", pythonExec, pythonPaths); } @Override @@ -68,46 +47,17 @@ public void loadOperator(OperatorConfig config) {} @Override public Object invoke(OperatorConfig config, Object... 
input) { - PythonInterpreter interpreter = getPythonInterpreter(config); - String pythonObject = getPythonOperatorObject(config); - try { - return interpreter.invokeMethod(pythonObject, config.getMethod(), input); - } finally { - interpreter.close(); - } - } - - private void loadOperatorObject(OperatorConfig config, PythonInterpreter interpreter) { - if (StringUtils.isNotBlank(pythonKnextPath)) { - interpreter.exec(String.format("import sys; sys.path.append(\"%s\")", pythonKnextPath)); - } - String pythonOperatorObject = getPythonOperatorObject(config); - interpreter.exec( - String.format("from %s import %s", config.getModulePath(), config.getClassName())); - interpreter.exec( - String.format( - "%s=%s(%s)", - pythonOperatorObject, + PemjaConfig pemjaConfig = + new PemjaConfig( + pythonExec, + pythonPaths, + hostAddr, + projectId, + config.getModulePath(), config.getClassName(), - paramToPythonString(config.getParams(), config.getParamsPrefix()))); - } - - private String getPythonOperatorObject(OperatorConfig config) { - String pythonOperatorObject = config.getClassName() + "_" + config.getUniqueKey(); - return pythonOperatorObject; - } - - private String paramToPythonString(Map params, String paramsPrefix) { - if (MapUtils.isEmpty(params)) { - return ""; - } - if (StringUtils.isBlank(paramsPrefix)) { - paramsPrefix = ""; - } - String keyValue = - params.entrySet().stream() - .map(entry -> String.format("'%s': '%s'", entry.getKey(), entry.getValue())) - .collect(Collectors.joining(",")); - return String.format("%s{%s}", paramsPrefix, keyValue); + config.getMethod(), + config.getParams(), + config.getParamsPrefix()); + return PemjaUtils.invoke(pemjaConfig, input); } } diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/BuilderIndexProcessor.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/BuilderIndexProcessor.java index 658c9f62b..afc745213 100644 --- 
a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/BuilderIndexProcessor.java +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/BuilderIndexProcessor.java @@ -35,7 +35,7 @@ public class BuilderIndexProcessor extends BaseProcessor { - private ExecuteNode node; + private ExecuteNode node = new ExecuteNode(); private SearchEngineClient searchEngineClient; private CacheClient cacheClient; @@ -48,7 +48,9 @@ public void doInit(BuilderContext context) throws BuilderException { super.doInit(context); searchEngineClient = SearchEngineClientDriverManager.getClient(context.getSearchEngineUrl()); cacheClient = CacheClientDriverManager.getClient(context.getCacheUrl()); - this.node = context.getExecuteNodes().get(getId()); + if (context.getExecuteNodes() != null) { + this.node = context.getExecuteNodes().get(getId()); + } } @Override diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/ExtractPostProcessor.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/ExtractPostProcessor.java index 3e753498a..8c8aeb3d4 100644 --- a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/ExtractPostProcessor.java +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/ExtractPostProcessor.java @@ -14,6 +14,8 @@ package com.antgroup.openspg.builder.core.physical.process; import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.TypeReference; import com.antgroup.openspg.builder.core.runtime.BuilderContext; import com.antgroup.openspg.builder.model.exception.BuilderException; import com.antgroup.openspg.builder.model.pipeline.ExecuteNode; @@ -21,14 +23,14 @@ import com.antgroup.openspg.builder.model.pipeline.enums.StatusEnum; import com.antgroup.openspg.builder.model.record.BaseRecord; import com.antgroup.openspg.builder.model.record.SubGraphRecord; -import 
com.google.common.collect.Lists; +import com.antgroup.openspg.common.constants.BuilderConstant; import java.util.ArrayList; import java.util.List; import java.util.Map; public class ExtractPostProcessor extends BasePythonProcessor { - private ExecuteNode node; + private ExecuteNode node = new ExecuteNode(); public ExtractPostProcessor(String id, String name, ExtractPostProcessorNodeConfig config) { super(id, name, config); @@ -37,39 +39,42 @@ public ExtractPostProcessor(String id, String name, ExtractPostProcessorNodeConf @Override public void doInit(BuilderContext context) throws BuilderException { super.doInit(context); - this.node = context.getExecuteNodes().get(getId()); + if (context.getExecuteNodes() != null) { + this.node = context.getExecuteNodes().get(getId()); + } } @Override public List process(List inputs) { node.setStatus(StatusEnum.RUNNING); - node.addTraceLog("Start post processor..."); + JSONObject pyConfig = new JSONObject(); + pyConfig.put(BuilderConstant.TYPE, BuilderConstant.BASE); + node.addTraceLog("Start alignment..."); List results = new ArrayList<>(); - List lists = Lists.newArrayList(); for (BaseRecord record : inputs) { SubGraphRecord spgRecord = (SubGraphRecord) record; - lists.add(mapper.convertValue(spgRecord, Map.class)); + Map map = mapper.convertValue(spgRecord, Map.class); + node.addTraceLog("invoke alignment operator:%s", config.getOperatorConfig().getClassName()); + List result = + (List) + operatorFactory.invoke( + config.getOperatorConfig(), + BuilderConstant.POSTPROCESSOR_ABC, + pyConfig.toJSONString(), + map); + node.addTraceLog( + "invoke alignment operator:%s succeed", config.getOperatorConfig().getClassName()); + List records = + JSON.parseObject(JSON.toJSONString(result), new TypeReference>() {}); + for (SubGraphRecord subGraph : records) { + node.addTraceLog( + "alignment succeed node:%s edge%s", + subGraph.getResultNodes().size(), subGraph.getResultEdges().size()); + results.add(subGraph); + } } - - node.addTraceLog( - 
"invoke post processor operator:%s", config.getOperatorConfig().getClassName()); - Object result = operatorFactory.invoke(config.getOperatorConfig(), lists); - node.addTraceLog( - "invoke post processor operator:%s succeed", config.getOperatorConfig().getClassName()); - SubGraphRecord subGraph = JSON.parseObject(JSON.toJSONString(result), SubGraphRecord.class); - node.addTraceLog( - "post processor succeed node:%s edge%s", - subGraph.getResultNodes().size(), subGraph.getResultEdges().size()); - - /*ProjectSchema projectSchema = CommonUtils.getProjectSchema(context); - List nodes = CommonUtils.convertNodes(subGraph, projectSchema); - List edges = CommonUtils.convertEdges(subGraph, projectSchema); - results.addAll(nodes); - results.addAll(edges);*/ - results.add(subGraph); - node.addTraceLog("post processor complete..."); - node.setOutputs(subGraph); + node.addTraceLog("alignment complete..."); node.setStatus(StatusEnum.FINISH); return results; } diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/LLMNlExtractProcessor.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/LLMNlExtractProcessor.java index 7ae573c4d..6c0bef065 100644 --- a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/LLMNlExtractProcessor.java +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/LLMNlExtractProcessor.java @@ -14,6 +14,7 @@ package com.antgroup.openspg.builder.core.physical.process; import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.TypeReference; import com.antgroup.openspg.builder.core.physical.operator.OperatorFactory; import com.antgroup.openspg.builder.core.runtime.BuilderContext; @@ -24,6 +25,9 @@ import com.antgroup.openspg.builder.model.record.BaseRecord; import com.antgroup.openspg.builder.model.record.ChunkRecord; import com.antgroup.openspg.builder.model.record.SubGraphRecord; +import 
com.antgroup.openspg.common.constants.BuilderConstant; +import com.antgroup.openspg.server.common.model.CommonConstants; +import com.antgroup.openspg.server.common.model.project.Project; import com.fasterxml.jackson.databind.ObjectMapper; import java.util.ArrayList; import java.util.List; @@ -40,7 +44,8 @@ @Slf4j public class LLMNlExtractProcessor extends BasePythonProcessor { - private ExecuteNode node; + private ExecuteNode node = new ExecuteNode(); + private Project project; private static final RejectedExecutionHandler handler = (r, executor) -> { @@ -60,7 +65,10 @@ public LLMNlExtractProcessor(String id, String name, LLMNlExtractNodeConfig conf @Override public void doInit(BuilderContext context) throws BuilderException { super.doInit(context); - this.node = context.getExecuteNodes().get(getId()); + if (context.getExecuteNodes() != null) { + this.node = context.getExecuteNodes().get(getId()); + } + project = JSON.parseObject(context.getProject(), Project.class); if (executor == null) { executor = new ThreadPoolExecutor( @@ -84,7 +92,8 @@ public List process(List inputs) { for (BaseRecord record : inputs) { ChunkRecord chunkRecord = (ChunkRecord) record; Future> future = - executor.submit(new ExtractTaskCallable(node, chunkRecord, operatorFactory, config)); + executor.submit( + new ExtractTaskCallable(node, chunkRecord, operatorFactory, config, project)); futures.add(future); } @@ -106,32 +115,44 @@ static class ExtractTaskCallable implements Callable> { private final ChunkRecord chunkRecord; private final OperatorFactory operatorFactory; private final LLMNlExtractNodeConfig config; + private final Project project; public ExtractTaskCallable( ExecuteNode node, ChunkRecord chunkRecord, OperatorFactory operatorFactory, - LLMNlExtractNodeConfig config) { + LLMNlExtractNodeConfig config, + Project project) { this.chunkRecord = chunkRecord; this.node = node; this.operatorFactory = operatorFactory; this.config = config; + this.project = project; } @Override public 
List call() throws Exception { ChunkRecord.Chunk chunk = chunkRecord.getChunk(); String names = chunk.getName(); + String projectConfig = project.getConfig(); + JSONObject llm = JSONObject.parseObject(projectConfig).getJSONObject(CommonConstants.LLM); + JSONObject pyConfig = new JSONObject(); + pyConfig.put(BuilderConstant.TYPE, BuilderConstant.SCHEMA_FREE); + pyConfig.put(BuilderConstant.LLM, llm); node.addTraceLog( "invoke extract operator:%s chunk:%s", config.getOperatorConfig().getClassName(), names); - Map record = new ObjectMapper().convertValue(chunk, Map.class); - log.info("LLMNlExtractProcessor invoke Chunks: {}", names); List result = - (List) operatorFactory.invoke(config.getOperatorConfig(), record); + (List) + operatorFactory.invoke( + config.getOperatorConfig(), + BuilderConstant.EXTRACTOR_ABC, + pyConfig.toJSONString(), + record); List records = JSON.parseObject(JSON.toJSONString(result), new TypeReference>() {}); + // CommonUtils.addLabelPrefix(project.getNamespace(), records); node.addTraceLog( "invoke extract operator:%s chunk:%s succeed", config.getOperatorConfig().getClassName(), names); diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/ParagraphSplitProcessor.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/ParagraphSplitProcessor.java index 5aad80271..ceab4fa8c 100644 --- a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/ParagraphSplitProcessor.java +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/ParagraphSplitProcessor.java @@ -14,7 +14,9 @@ package com.antgroup.openspg.builder.core.physical.process; import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.TypeReference; +import com.antgroup.openspg.builder.core.physical.utils.CommonUtils; import com.antgroup.openspg.builder.core.runtime.BuilderContext; import 
com.antgroup.openspg.builder.model.exception.BuilderException; import com.antgroup.openspg.builder.model.pipeline.ExecuteNode; @@ -23,20 +25,18 @@ import com.antgroup.openspg.builder.model.record.BaseRecord; import com.antgroup.openspg.builder.model.record.ChunkRecord; import com.antgroup.openspg.builder.model.record.StringRecord; -import com.antgroup.openspg.common.util.Md5Utils; +import com.antgroup.openspg.common.constants.BuilderConstant; +import com.antgroup.openspg.common.util.pemja.PythonInvokeMethod; +import com.antgroup.openspg.server.common.model.project.Project; import com.fasterxml.jackson.databind.ObjectMapper; import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.UUID; -import org.apache.commons.io.FilenameUtils; -import org.apache.commons.lang3.StringUtils; -import pemja.core.PythonInterpreter; -import pemja.core.PythonInterpreterConfig; public class ParagraphSplitProcessor extends BasePythonProcessor { - private ExecuteNode node; + private ExecuteNode node = new ExecuteNode(); + private Project project; public ParagraphSplitProcessor(String id, String name, ParagraphSplitNodeConfig config) { super(id, name, config); @@ -45,7 +45,10 @@ public ParagraphSplitProcessor(String id, String name, ParagraphSplitNodeConfig @Override public void doInit(BuilderContext context) throws BuilderException { super.doInit(context); - this.node = context.getExecuteNodes().get(getId()); + if (context.getExecuteNodes() != null) { + this.node = context.getExecuteNodes().get(getId()); + } + project = JSON.parseObject(context.getProject(), Project.class); } @Override @@ -53,24 +56,32 @@ public List process(List inputs) { node.setStatus(StatusEnum.RUNNING); node.addTraceLog("Start split document..."); List results = new ArrayList<>(); + JSONObject pyConfig = new JSONObject(); + JSONObject extension = JSON.parseObject(config.getExtension()); + CommonUtils.getSplitterConfig( + pyConfig, + context.getPythonExec(), + context.getPythonPaths(), 
+ context.getSchemaUrl(), + project, + extension); for (BaseRecord record : inputs) { StringRecord stringRecord = (StringRecord) record; String fileUrl = stringRecord.getDocument(); - List chunks; String token = config.getToken(); - if (StringUtils.isNotBlank(token)) { - chunks = readYuque(fileUrl, token); - } else { - chunks = readFile(fileUrl); - } - + List chunks = readSource(fileUrl, token); node.addTraceLog("invoke split operator:%s", config.getOperatorConfig().getClassName()); for (ChunkRecord.Chunk chunk : chunks) { node.addTraceLog("invoke split chunk:%s", chunk.getName()); Map map = new ObjectMapper().convertValue(chunk, Map.class); List result = - (List) operatorFactory.invoke(config.getOperatorConfig(), map); + (List) + operatorFactory.invoke( + config.getOperatorConfig(), + BuilderConstant.SPLITTER_ABC, + pyConfig.toJSONString(), + map); List chunkList = JSON.parseObject( JSON.toJSONString(result), new TypeReference>() {}); @@ -89,83 +100,19 @@ public List process(List inputs) { return results; } - public List readYuque(String url, String token) { - PythonInterpreterConfig.PythonInterpreterConfigBuilder builder = - PythonInterpreterConfig.newBuilder(); - builder.setPythonExec(context.getPythonExec()); - builder.addPythonPaths(context.getPythonPaths()); - PythonInterpreter pythonInterpreter = new PythonInterpreter(builder.build()); - try { - if (StringUtils.isNotBlank(context.getPythonKnextPath())) { - pythonInterpreter.exec( - String.format("import sys; sys.path.append(\"%s\")", context.getPythonKnextPath())); - } - String className = "YuqueReader"; - node.addTraceLog("invoke chunk operator:%s", className); - pythonInterpreter.exec("from kag.builder.component.reader import " + className); - String pythonObject = "pyo" + "_" + Md5Utils.md5Of(UUID.randomUUID().toString()); - pythonInterpreter.exec( - String.format( - "%s=%s(**{'token' : '%s','project_id' : '%s'})", - pythonObject, className, token, context.getProjectId())); - List result = - (List) 
pythonInterpreter.invokeMethod(pythonObject, "_handle", url); - List chunkList = - JSON.parseObject( - JSON.toJSONString(result), new TypeReference>() {}); - node.addTraceLog("invoke chunk operator:%s chunks:%s succeed", className, chunkList.size()); - return chunkList; - } finally { - pythonInterpreter.close(); - } - } - - public List readFile(String fileUrl) { - PythonInterpreterConfig.PythonInterpreterConfigBuilder builder = - PythonInterpreterConfig.newBuilder(); - builder.setPythonExec(context.getPythonExec()); - builder.addPythonPaths(context.getPythonPaths()); - PythonInterpreter pythonInterpreter = new PythonInterpreter(builder.build()); - try { - if (StringUtils.isNotBlank(context.getPythonKnextPath())) { - pythonInterpreter.exec( - String.format("import sys; sys.path.append(\"%s\")", context.getPythonKnextPath())); - } - String extension = FilenameUtils.getExtension(fileUrl).toLowerCase(); - String className = "TXTReader"; - switch (extension) { - case "csv": - className = "CSVReader"; - break; - case "pdf": - className = "PDFReader"; - break; - case "md": - className = "MarkDownReader"; - break; - case "json": - className = "JSONReader"; - break; - case "doc": - case "docx": - className = "DocxReader"; - break; - } - node.addTraceLog("invoke chunk operator:%s", className); - pythonInterpreter.exec("from kag.builder.component.reader import " + className); - String pythonObject = "pyo" + "_" + Md5Utils.md5Of(UUID.randomUUID().toString()); - pythonInterpreter.exec( - String.format( - "%s=%s(**{'project_id' : '%s'})", pythonObject, className, context.getProjectId())); - List result = - (List) pythonInterpreter.invokeMethod(pythonObject, "_handle", fileUrl); - List chunkList = - JSON.parseObject( - JSON.toJSONString(result), new TypeReference>() {}); - node.addTraceLog("invoke chunk operator:%s chunks:%s succeed", className, chunkList.size()); - return chunkList; - } finally { - pythonInterpreter.close(); - } + public List readSource(String url, String token) 
{ + node.addTraceLog("invoke read operator:%s", PythonInvokeMethod.BRIDGE_READER.getMethod()); + List chunkList = + CommonUtils.readSource( + context.getPythonExec(), + context.getPythonPaths(), + context.getSchemaUrl(), + project, + url, + token); + node.addTraceLog( + "invoke read operator:%s chunks:%s succeed", + PythonInvokeMethod.BRIDGE_READER.getMethod(), chunkList.size()); + return chunkList; } } diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/PythonProcessor.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/PythonProcessor.java new file mode 100644 index 000000000..8bda8e990 --- /dev/null +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/PythonProcessor.java @@ -0,0 +1,63 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.builder.core.physical.process; + +import com.alibaba.fastjson.JSON; +import com.antgroup.openspg.builder.core.runtime.BuilderContext; +import com.antgroup.openspg.builder.model.exception.BuilderException; +import com.antgroup.openspg.builder.model.pipeline.config.PythonNodeConfig; +import com.antgroup.openspg.builder.model.record.BaseRecord; +import com.antgroup.openspg.builder.model.record.StringRecord; +import com.antgroup.openspg.common.util.StringUtils; +import com.antgroup.openspg.common.util.pemja.PemjaUtils; +import com.antgroup.openspg.common.util.pemja.model.PemjaConfig; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; +import java.util.List; +import java.util.Map; + +public class PythonProcessor extends BaseProcessor { + + PemjaConfig config; + + public PythonProcessor(String id, String name, PythonNodeConfig config) { + super(id, name, config); + this.config = config.getPemjaConfig(); + } + + @Override + public void doInit(BuilderContext context) throws BuilderException { + super.doInit(context); + if (StringUtils.isBlank(config.getPythonPaths())) { + config.setPythonPaths(context.getPythonPaths()); + } + if (StringUtils.isBlank(config.getPythonExec())) { + config.setPythonExec(context.getPythonExec()); + } + } + + @Override + public void close() throws Exception {} + + @Override + public List process(List inputs) { + List outputs = Lists.newArrayList(); + for (BaseRecord record : inputs) { + Map map = new ObjectMapper().convertValue(record, Map.class); + List results = (List) PemjaUtils.invoke(config, map); + results.forEach(result -> outputs.add(new StringRecord(JSON.toJSONString(result)))); + } + return outputs; + } +} diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/VectorizerProcessor.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/VectorizerProcessor.java index e2bcbbdf9..69648b425 100644 
--- a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/VectorizerProcessor.java +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/process/VectorizerProcessor.java @@ -14,6 +14,7 @@ package com.antgroup.openspg.builder.core.physical.process; import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.TypeReference; import com.antgroup.openspg.builder.core.runtime.BuilderContext; import com.antgroup.openspg.builder.model.exception.BuilderException; @@ -22,6 +23,9 @@ import com.antgroup.openspg.builder.model.pipeline.enums.StatusEnum; import com.antgroup.openspg.builder.model.record.BaseRecord; import com.antgroup.openspg.builder.model.record.SubGraphRecord; +import com.antgroup.openspg.common.constants.BuilderConstant; +import com.antgroup.openspg.server.common.model.CommonConstants; +import com.antgroup.openspg.server.common.model.project.Project; import com.google.common.collect.Lists; import java.util.ArrayList; import java.util.List; @@ -29,7 +33,8 @@ public class VectorizerProcessor extends BasePythonProcessor { - private ExecuteNode node; + private ExecuteNode node = new ExecuteNode(); + private Project project; public VectorizerProcessor(String id, String name, VectorizerProcessorNodeConfig config) { super(id, name, config); @@ -38,12 +43,21 @@ public VectorizerProcessor(String id, String name, VectorizerProcessorNodeConfig @Override public void doInit(BuilderContext context) throws BuilderException { super.doInit(context); - this.node = context.getExecuteNodes().get(getId()); + if (context.getExecuteNodes() != null) { + this.node = context.getExecuteNodes().get(getId()); + } + project = JSON.parseObject(context.getProject(), Project.class); } @Override public List process(List inputs) { node.setStatus(StatusEnum.RUNNING); + String projectConfig = project.getConfig(); + JSONObject vec = + 
JSONObject.parseObject(projectConfig).getJSONObject(CommonConstants.VECTORIZER); + JSONObject pyConfig = new JSONObject(); + pyConfig.put(BuilderConstant.TYPE, BuilderConstant.BATCH); + pyConfig.put(BuilderConstant.VECTORIZE_MODEL, vec); node.addTraceLog("Start vectorizer processor..."); List results = new ArrayList<>(); SubGraphRecord subGraph = new SubGraphRecord(Lists.newArrayList(), Lists.newArrayList()); @@ -56,7 +70,13 @@ public List process(List inputs) { Map map = mapper.convertValue(spgRecord, Map.class); node.addTraceLog( "invoke vectorizer processor operator:%s", config.getOperatorConfig().getClassName()); - List result = (List) operatorFactory.invoke(config.getOperatorConfig(), map); + List result = + (List) + operatorFactory.invoke( + config.getOperatorConfig(), + BuilderConstant.VECTORIZER_ABC, + pyConfig.toJSONString(), + map); node.addTraceLog( "invoke vectorizer processor operator:%s succeed", config.getOperatorConfig().getClassName()); diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/utils/CommonUtils.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/utils/CommonUtils.java index 66eb0f9b1..9a18a99a9 100644 --- a/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/utils/CommonUtils.java +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/physical/utils/CommonUtils.java @@ -13,15 +13,24 @@ package com.antgroup.openspg.builder.core.physical.utils; +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONArray; +import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.TypeReference; import com.antgroup.openspg.builder.core.runtime.BuilderContext; import com.antgroup.openspg.builder.model.exception.PipelineConfigException; import com.antgroup.openspg.builder.model.record.BaseSPGRecord; +import com.antgroup.openspg.builder.model.record.ChunkRecord; import com.antgroup.openspg.builder.model.record.RelationRecord; import 
com.antgroup.openspg.builder.model.record.SPGRecordTypeEnum; import com.antgroup.openspg.builder.model.record.SubGraphRecord; import com.antgroup.openspg.builder.model.record.property.SPGPropertyRecord; import com.antgroup.openspg.cloudext.interfaces.graphstore.adapter.util.EdgeRecordConvertor; import com.antgroup.openspg.cloudext.interfaces.graphstore.adapter.util.VertexRecordConvertor; +import com.antgroup.openspg.common.constants.BuilderConstant; +import com.antgroup.openspg.common.util.pemja.PemjaUtils; +import com.antgroup.openspg.common.util.pemja.PythonInvokeMethod; +import com.antgroup.openspg.common.util.pemja.model.PemjaConfig; import com.antgroup.openspg.core.schema.model.BasicInfo; import com.antgroup.openspg.core.schema.model.identifier.RelationIdentifier; import com.antgroup.openspg.core.schema.model.identifier.SPGTypeIdentifier; @@ -37,11 +46,17 @@ import com.antgroup.openspg.server.api.http.client.HttpSchemaFacade; import com.antgroup.openspg.server.api.http.client.util.ConnectionInfo; import com.antgroup.openspg.server.api.http.client.util.HttpClientBootstrap; +import com.antgroup.openspg.server.common.model.CommonConstants; +import com.antgroup.openspg.server.common.model.project.Project; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.io.FilenameUtils; +import org.apache.commons.lang3.StringUtils; public class CommonUtils { @@ -100,6 +115,30 @@ public static String labelPrefix(String namespace, String label) { return namespace + DOT + label; } + public static void addLabelPrefix(String namespace, List records) { + records.forEach( + record -> { + List resultNodes = record.getResultNodes(); + if (resultNodes != null) { + resultNodes.forEach( + resultNode -> { + String label = CommonUtils.labelPrefix(namespace, 
resultNode.getLabel()); + resultNode.setLabel(label); + }); + } + List resultEdges = record.getResultEdges(); + if (resultEdges != null) { + resultEdges.forEach( + resultEdge -> { + String fromType = CommonUtils.labelPrefix(namespace, resultEdge.getFromType()); + String toType = CommonUtils.labelPrefix(namespace, resultEdge.getToType()); + resultEdge.setFromType(fromType); + resultEdge.setToType(toType); + }); + } + }); + } + public static List convertEdges( SubGraphRecord subGraph, ProjectSchema projectSchema, String namespace) { List resultEdges = subGraph.getResultEdges(); @@ -147,4 +186,115 @@ private static void replaceUnSpreadableStandardProperty(BaseSPGRecord record) { property.getValue().setSingleStd(property.getValue().getRaw()); }); } + + public static List readSource( + String pythonExec, + String pythonPaths, + String hostAddr, + Project project, + String url, + String token) { + PythonInvokeMethod bridgeReader = PythonInvokeMethod.BRIDGE_READER; + Long projectId = project.getId(); + JSONObject llm = JSONObject.parseObject(project.getConfig()).getJSONObject(CommonConstants.LLM); + JSONObject pyConfig = new JSONObject(); + JSONObject scanner = new JSONObject(); + pyConfig.put(BuilderConstant.SCANNER, scanner); + JSONObject reader = new JSONObject(); + pyConfig.put(BuilderConstant.READER, reader); + + if (StringUtils.isNotBlank(token)) { + scanner.put(BuilderConstant.TYPE, BuilderConstant.YU_QUE); + scanner.put(BuilderConstant.TOKEN, token); + reader.put(BuilderConstant.TYPE, BuilderConstant.YU_QUE); + reader.put(BuilderConstant.CUT_DEPTH, 1); + } else { + String extension = FilenameUtils.getExtension(url).toLowerCase(); + switch (extension) { + case BuilderConstant.CSV: + scanner.put(BuilderConstant.TYPE, BuilderConstant.CSV); + scanner.put(BuilderConstant.HEADER, true); + JSONArray colNames = new JSONArray(); + colNames.add(BuilderConstant.CONTENT); + scanner.put(BuilderConstant.COL_NAMES, colNames); + reader.put(BuilderConstant.TYPE, 
BuilderConstant.DICT); + break; + case BuilderConstant.JSON: + scanner.put(BuilderConstant.TYPE, BuilderConstant.JSON); + reader.put(BuilderConstant.TYPE, BuilderConstant.DICT); + reader.put(BuilderConstant.ID_COL, BuilderConstant.ID); + reader.put(BuilderConstant.NAME_COL, BuilderConstant.NAME); + reader.put(BuilderConstant.CONTENT_COL, BuilderConstant.CONTENT); + break; + case BuilderConstant.TXT: + scanner.put(BuilderConstant.TYPE, BuilderConstant.FILE); + reader.put(BuilderConstant.TYPE, BuilderConstant.TXT); + break; + case BuilderConstant.PDF: + scanner.put(BuilderConstant.TYPE, BuilderConstant.FILE); + reader.put(BuilderConstant.TYPE, BuilderConstant.PDF); + reader.put(BuilderConstant.CUT_DEPTH, 1); + reader.put(BuilderConstant.LLM, llm); + break; + case BuilderConstant.MD: + scanner.put(BuilderConstant.TYPE, BuilderConstant.FILE); + reader.put(BuilderConstant.TYPE, BuilderConstant.MD); + reader.put(BuilderConstant.CUT_DEPTH, 1); + reader.put(BuilderConstant.LLM, llm); + break; + case BuilderConstant.DOC: + case BuilderConstant.DOCX: + scanner.put(BuilderConstant.TYPE, BuilderConstant.FILE); + reader.put(BuilderConstant.TYPE, BuilderConstant.DOCX); + reader.put(BuilderConstant.LLM, llm); + break; + } + } + PemjaConfig config = + new PemjaConfig( + pythonExec, pythonPaths, hostAddr, projectId, bridgeReader, Maps.newHashMap()); + List result; + if (StringUtils.isNotBlank(token)) { + List urls = Lists.newArrayList(); + urls.add(url); + result = (List) PemjaUtils.invoke(config, pyConfig.toJSONString(), urls); + } else { + result = (List) PemjaUtils.invoke(config, pyConfig.toJSONString(), url); + } + List chunkList = + JSON.parseObject( + JSON.toJSONString(result), new TypeReference>() {}); + return chunkList; + } + + public static PemjaConfig getSplitterConfig( + JSONObject pyConfig, + String pythonExec, + String pythonPaths, + String hostAddr, + Project project, + JSONObject builderExtension) { + Long projectId = project.getId(); + JSONObject llm = 
JSONObject.parseObject(project.getConfig()).getJSONObject(CommonConstants.LLM); + JSONObject config = builderExtension.getJSONObject(BuilderConstant.SPLIT_CONFIG); + Boolean semanticSplit = config.getBoolean(BuilderConstant.SEMANTIC_SPLIT); + + Long splitLength = config.getLong(BuilderConstant.SPLIT_LENGTH); + + PythonInvokeMethod splitter = PythonInvokeMethod.BRIDGE_COMPONENT; + if (semanticSplit != null && semanticSplit) { + pyConfig.put(BuilderConstant.TYPE, BuilderConstant.SEMANTIC); + pyConfig.put(BuilderConstant.LLM, llm); + pyConfig.put(BuilderConstant.PY_SPLIT_LENGTH, splitLength); + } else { + pyConfig.put(BuilderConstant.TYPE, BuilderConstant.LENGTH); + pyConfig.put(BuilderConstant.PY_SPLIT_LENGTH, splitLength); + pyConfig.put(BuilderConstant.PY_WINDOW_LENGTH, 0); + } + + PemjaConfig pemjaConfig = + new PemjaConfig(pythonExec, pythonPaths, hostAddr, projectId, splitter, Maps.newHashMap()); + + return pemjaConfig; + } } diff --git a/builder/core/src/main/java/com/antgroup/openspg/builder/core/runtime/BuilderContext.java b/builder/core/src/main/java/com/antgroup/openspg/builder/core/runtime/BuilderContext.java index f7623df0a..6cf5bb06a 100644 --- a/builder/core/src/main/java/com/antgroup/openspg/builder/core/runtime/BuilderContext.java +++ b/builder/core/src/main/java/com/antgroup/openspg/builder/core/runtime/BuilderContext.java @@ -47,5 +47,4 @@ public class BuilderContext implements Serializable { private Map executeNodes; private String schemaUrl; - private String pythonKnextPath; } diff --git a/builder/model/src/main/java/com/antgroup/openspg/builder/model/BuilderConstants.java b/builder/model/src/main/java/com/antgroup/openspg/builder/model/BuilderConstants.java new file mode 100644 index 000000000..bcd7d90b0 --- /dev/null +++ b/builder/model/src/main/java/com/antgroup/openspg/builder/model/BuilderConstants.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not 
use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.builder.model; + +public class BuilderConstants { + + public static final String PROJECT_ID_OPTION = "projectId"; + public static final String JOB_NAME_OPTION = "jobName"; + public static final String PIPELINE_OPTION = "pipeline"; + public static final String PYTHON_EXEC_OPTION = "pythonExec"; + public static final String PYTHON_PATHS_OPTION = "pythonPaths"; + public static final String SCHEMA_URL_OPTION = "schemaUrl"; + public static final String PARALLELISM_OPTION = "parallelism"; + public static final String ALTER_OPERATION_OPTION = "alterOperation"; + public static final String LOG_FILE_OPTION = "logFile"; + public static final String LEAD_TO_OPTION = "leadTo"; + public static final String GRAPH_STORE_URL_OPTION = "graphStoreUrl"; + public static final String SEARCH_ENGINE_URL_OPTION = "searchEngineUrl"; + public static final String MODEL_EXECUTE_NUM_OPTION = "modelExecuteNum"; + public static final String PROJECT_OPTION = "project"; +} diff --git a/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/ExecuteNode.java b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/ExecuteNode.java index 9ae8cd7f0..8ea02d4a3 100644 --- a/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/ExecuteNode.java +++ b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/ExecuteNode.java @@ -19,9 +19,11 @@ import java.util.Date; import lombok.Getter; import lombok.Setter; +import lombok.extern.slf4j.Slf4j; @Getter @Setter +@Slf4j public class ExecuteNode extends BaseValObj { /** The id of the node. 
*/ @@ -40,6 +42,8 @@ public class ExecuteNode extends BaseValObj { private StringBuffer traceLog; + public ExecuteNode() {} + public ExecuteNode(Node node) { this.id = node.getId(); this.name = node.getName(); @@ -51,6 +55,10 @@ public ExecuteNode(Node node) { public synchronized void addTraceLog(String message, Object... args) { message = String.format(message, args); String currentTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()); - traceLog.append(currentTime + ": " + message + System.getProperty("line.separator")); + if (traceLog == null) { + log.info("traceLog: " + message); + } else { + traceLog.append(currentTime + ": " + message + System.getProperty("line.separator")); + } } } diff --git a/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/PipelineUtils.java b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/PipelineUtils.java new file mode 100644 index 000000000..652b885ab --- /dev/null +++ b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/PipelineUtils.java @@ -0,0 +1,97 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.builder.model.pipeline; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.builder.model.pipeline.config.ExtractPostProcessorNodeConfig; +import com.antgroup.openspg.builder.model.pipeline.config.LLMNlExtractNodeConfig; +import com.antgroup.openspg.builder.model.pipeline.config.Neo4jSinkNodeConfig; +import com.antgroup.openspg.builder.model.pipeline.config.OperatorConfig; +import com.antgroup.openspg.builder.model.pipeline.config.ParagraphSplitNodeConfig; +import com.antgroup.openspg.builder.model.pipeline.config.StringSourceNodeConfig; +import com.antgroup.openspg.builder.model.pipeline.config.predicting.VectorizerProcessorNodeConfig; +import com.antgroup.openspg.common.constants.BuilderConstant; +import com.antgroup.openspg.common.util.pemja.PythonInvokeMethod; +import com.antgroup.openspg.server.common.model.bulider.BuilderJob; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import java.util.List; +import java.util.UUID; + +public class PipelineUtils { + + public static Pipeline getKagDefaultPipeline(BuilderJob job) { + List nodes = Lists.newArrayList(); + List edges = Lists.newArrayList(); + String sourceId = UUID.randomUUID().toString(); + String fileUrl = job.getFileUrl(); + JSONObject extension = JSON.parseObject(job.getExtension()); + StringSourceNodeConfig sourceConfig = new StringSourceNodeConfig(fileUrl); + Node source = new Node(sourceId, "Reader", sourceConfig); + nodes.add(source); + + String splitId = UUID.randomUUID().toString(); + PythonInvokeMethod splitMethod = PythonInvokeMethod.BRIDGE_COMPONENT; + JSONObject config = extension.getJSONObject(BuilderConstant.YU_QUE_CONFIG); + String token = (config == null) ? 
null : config.getString(BuilderConstant.TOKEN); + OperatorConfig operatorConfigSplit = new OperatorConfig(splitMethod, Maps.newHashMap()); + Node split = + new Node( + splitId, + "Splitter", + new ParagraphSplitNodeConfig(operatorConfigSplit, token, job.getExtension())); + nodes.add(split); + edges.add(new Edge(sourceId, splitId)); + + String extractId = UUID.randomUUID().toString(); + PythonInvokeMethod extractMethod = PythonInvokeMethod.BRIDGE_COMPONENT; + OperatorConfig operatorConfig = new OperatorConfig(extractMethod, Maps.newHashMap()); + Node extract = new Node(extractId, "Extractor", new LLMNlExtractNodeConfig(operatorConfig)); + nodes.add(extract); + edges.add(new Edge(splitId, extractId)); + + String vectorizerId = UUID.randomUUID().toString(); + PythonInvokeMethod vectorizerMethod = PythonInvokeMethod.BRIDGE_COMPONENT; + OperatorConfig operatorConfigVectorizer = + new OperatorConfig(vectorizerMethod, Maps.newHashMap()); + Node vectorizerProcessor = + new Node( + vectorizerId, + "Vectorizer", + new VectorizerProcessorNodeConfig(operatorConfigVectorizer)); + nodes.add(vectorizerProcessor); + edges.add(new Edge(extractId, vectorizerId)); + + String alignmentId = UUID.randomUUID().toString(); + PythonInvokeMethod alignmentMethod = PythonInvokeMethod.BRIDGE_COMPONENT; + OperatorConfig operatorConfigAlignment = new OperatorConfig(alignmentMethod, Maps.newHashMap()); + Node alignmentProcessor = + new Node( + alignmentId, "Alignment", new ExtractPostProcessorNodeConfig(operatorConfigAlignment)); + nodes.add(alignmentProcessor); + edges.add(new Edge(vectorizerId, alignmentId)); + + String sinkId = UUID.randomUUID().toString(); + Boolean autoWrite = + extension + .getJSONObject(BuilderConstant.EXTRACT_CONFIG) + .getBoolean(BuilderConstant.AUTO_WRITE); + Node sink = new Node(sinkId, "Writer", new Neo4jSinkNodeConfig(autoWrite)); + nodes.add(sink); + edges.add(new Edge(alignmentId, sinkId)); + + Pipeline pipeline = new Pipeline(nodes, edges); + return pipeline; + } 
+} diff --git a/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/OperatorConfig.java b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/OperatorConfig.java index 7ac927ec1..48efbf50a 100644 --- a/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/OperatorConfig.java +++ b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/OperatorConfig.java @@ -14,6 +14,7 @@ package com.antgroup.openspg.builder.model.pipeline.config; import com.alibaba.fastjson.JSON; +import com.antgroup.openspg.common.util.pemja.PythonInvokeMethod; import com.antgroup.openspg.server.common.model.base.BaseValObj; import com.google.common.collect.Lists; import java.util.Map; @@ -25,8 +26,6 @@ @EqualsAndHashCode(callSuper = false) public class OperatorConfig extends BaseValObj { - private final String filePath; - private final String modulePath; private final String className; @@ -38,22 +37,25 @@ public class OperatorConfig extends BaseValObj { private final String paramsPrefix; public OperatorConfig( - String filePath, - String modulePath, - String className, - String method, - Map params) { - this(filePath, modulePath, className, method, params, ""); + String modulePath, String className, String method, Map params) { + this(modulePath, className, method, params, ""); + } + + public OperatorConfig(PythonInvokeMethod method, Map params) { + this( + method.getModulePath(), + method.getClassName(), + method.getMethod(), + params, + method.getParamsPrefix()); } public OperatorConfig( - String filePath, String modulePath, String className, String method, Map params, String paramsPrefix) { - this.filePath = filePath; this.modulePath = modulePath; this.className = className; this.method = method; @@ -66,6 +68,6 @@ public String getUniqueKey() { String.join( ";", Lists.newArrayList( - filePath, modulePath, className, method, JSON.toJSONString(params), paramsPrefix))); + modulePath, className, 
method, JSON.toJSONString(params), paramsPrefix))); } } diff --git a/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/ParagraphSplitNodeConfig.java b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/ParagraphSplitNodeConfig.java index c2b9c3ea0..0d5e9451c 100644 --- a/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/ParagraphSplitNodeConfig.java +++ b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/ParagraphSplitNodeConfig.java @@ -19,24 +19,12 @@ @Getter public class ParagraphSplitNodeConfig extends BasePythonNodeConfig { - private final String nl; - private final Boolean semanticSplit; - private final Long splitLength; - private final Boolean builderIndex; private final String token; + private final String extension; - public ParagraphSplitNodeConfig( - OperatorConfig operatorConfig, - String nl, - Boolean semanticSplit, - Long splitLength, - Boolean builderIndex, - String token) { + public ParagraphSplitNodeConfig(OperatorConfig operatorConfig, String token, String extension) { super(NodeTypeEnum.PARAGRAPH_SPLIT, operatorConfig); - this.nl = nl; - this.semanticSplit = semanticSplit; - this.splitLength = splitLength; - this.builderIndex = builderIndex; this.token = token; + this.extension = extension; } } diff --git a/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/PythonNodeConfig.java b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/PythonNodeConfig.java new file mode 100644 index 000000000..b6aac5bc3 --- /dev/null +++ b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/config/PythonNodeConfig.java @@ -0,0 +1,29 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.builder.model.pipeline.config; + +import com.antgroup.openspg.builder.model.pipeline.enums.NodeTypeEnum; +import com.antgroup.openspg.common.util.pemja.model.PemjaConfig; +import lombok.Getter; + +@Getter +public class PythonNodeConfig extends BaseNodeConfig { + + private final PemjaConfig pemjaConfig; + + public PythonNodeConfig(PemjaConfig pemjaConfig) { + super(NodeTypeEnum.PYTHON); + this.pemjaConfig = pemjaConfig; + } +} diff --git a/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/enums/NodeTypeEnum.java b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/enums/NodeTypeEnum.java index 8ecc33afd..e507f8902 100644 --- a/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/enums/NodeTypeEnum.java +++ b/builder/model/src/main/java/com/antgroup/openspg/builder/model/pipeline/enums/NodeTypeEnum.java @@ -20,6 +20,7 @@ public enum NodeTypeEnum { PARAGRAPH_SPLIT, BUILDER_INDEX, + PYTHON, /** MAPPING Component */ RELATION_MAPPING, diff --git a/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/LocalBuilderMain.java b/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/LocalBuilderMain.java index a21a63e0f..ad2eb8b23 100644 --- a/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/LocalBuilderMain.java +++ b/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/LocalBuilderMain.java @@ -20,9 +20,11 @@ import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.ConsoleAppender; import ch.qos.logback.core.FileAppender; +import 
com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.parser.ParserConfig; import com.antgroup.openspg.builder.core.runtime.BuilderContext; import com.antgroup.openspg.builder.core.runtime.impl.DefaultBuilderCatalog; -import com.antgroup.openspg.builder.model.BuilderJsonUtils; +import com.antgroup.openspg.builder.model.BuilderConstants; import com.antgroup.openspg.builder.model.exception.PipelineConfigException; import com.antgroup.openspg.builder.model.pipeline.Pipeline; import com.antgroup.openspg.builder.model.record.RecordAlterOperationEnum; @@ -50,19 +52,6 @@ @Slf4j public class LocalBuilderMain { - private static final String PROJECT_ID_OPTION = "projectId"; - private static final String JOB_NAME_OPTION = "jobName"; - private static final String PIPELINE_OPTION = "pipeline"; - private static final String PYTHON_EXEC_OPTION = "pythonExec"; - private static final String PYTHON_PATHS_OPTION = "pythonPaths"; - private static final String SCHEMA_URL_OPTION = "schemaUrl"; - private static final String PARALLELISM_OPTION = "parallelism"; - private static final String ALTER_OPERATION_OPTION = "alterOperation"; - private static final String LOG_FILE_OPTION = "logFile"; - private static final String LEAD_TO_OPTION = "leadTo"; - private static final String GRAPH_STORE_URL_OPTION = "graphStoreUrl"; - private static final String SEARCH_ENGINE_URL_OPTION = "searchEngineUrl"; - public static void main(String[] args) { CommandLine commandLine = parseArgs(args); try { @@ -77,21 +66,55 @@ public static CommandLine parseArgs(String[] args) { CommandLineParser parser = new DefaultParser(); Options options = new Options(); - options.addRequiredOption(PROJECT_ID_OPTION, PROJECT_ID_OPTION, true, "project id"); - options.addRequiredOption(JOB_NAME_OPTION, JOB_NAME_OPTION, true, "job name"); - options.addRequiredOption(PIPELINE_OPTION, PIPELINE_OPTION, true, "pipeline info"); - options.addRequiredOption(PYTHON_EXEC_OPTION, PYTHON_EXEC_OPTION, true, "python exec"); - 
options.addRequiredOption(PYTHON_PATHS_OPTION, PYTHON_PATHS_OPTION, true, "python path"); - options.addRequiredOption(SCHEMA_URL_OPTION, SCHEMA_URL_OPTION, true, "schema url"); - options.addOption(PARALLELISM_OPTION, PARALLELISM_OPTION, true, "parallelism"); + options.addRequiredOption( + BuilderConstants.PROJECT_ID_OPTION, BuilderConstants.PROJECT_ID_OPTION, true, "project id"); + options.addRequiredOption( + BuilderConstants.JOB_NAME_OPTION, BuilderConstants.JOB_NAME_OPTION, true, "job name"); + options.addRequiredOption( + BuilderConstants.PIPELINE_OPTION, BuilderConstants.PIPELINE_OPTION, true, "pipeline info"); + options.addRequiredOption( + BuilderConstants.PYTHON_EXEC_OPTION, + BuilderConstants.PYTHON_EXEC_OPTION, + true, + "python exec"); + options.addRequiredOption( + BuilderConstants.PYTHON_PATHS_OPTION, + BuilderConstants.PYTHON_PATHS_OPTION, + true, + "python path"); + options.addRequiredOption( + BuilderConstants.SCHEMA_URL_OPTION, BuilderConstants.SCHEMA_URL_OPTION, true, "schema url"); options.addOption( - ALTER_OPERATION_OPTION, ALTER_OPERATION_OPTION, true, "alter operation, upsert or delete"); - options.addOption(LOG_FILE_OPTION, LOG_FILE_OPTION, true, "log file"); - options.addOption(LEAD_TO_OPTION, LEAD_TO_OPTION, false, "enable leadTo"); + BuilderConstants.PARALLELISM_OPTION, + BuilderConstants.PARALLELISM_OPTION, + true, + "parallelism"); + options.addOption( + BuilderConstants.ALTER_OPERATION_OPTION, + BuilderConstants.ALTER_OPERATION_OPTION, + true, + "alter operation, upsert or delete"); + options.addOption( + BuilderConstants.LOG_FILE_OPTION, BuilderConstants.LOG_FILE_OPTION, true, "log file"); + options.addOption( + BuilderConstants.LEAD_TO_OPTION, BuilderConstants.LEAD_TO_OPTION, false, "enable leadTo"); + options.addRequiredOption( + BuilderConstants.GRAPH_STORE_URL_OPTION, + BuilderConstants.GRAPH_STORE_URL_OPTION, + true, + "graph store url"); options.addRequiredOption( - GRAPH_STORE_URL_OPTION, GRAPH_STORE_URL_OPTION, true, "graph 
store url"); + BuilderConstants.SEARCH_ENGINE_URL_OPTION, + BuilderConstants.SEARCH_ENGINE_URL_OPTION, + true, + "search engine url"); options.addRequiredOption( - SEARCH_ENGINE_URL_OPTION, SEARCH_ENGINE_URL_OPTION, true, "search engine url"); + BuilderConstants.PROJECT_OPTION, BuilderConstants.PROJECT_OPTION, true, "project"); + options.addOption( + BuilderConstants.MODEL_EXECUTE_NUM_OPTION, + BuilderConstants.MODEL_EXECUTE_NUM_OPTION, + true, + "model execute num"); CommandLine commandLine = null; HelpFormatter helper = new HelpFormatter(); @@ -105,29 +128,37 @@ public static CommandLine parseArgs(String[] args) { } private static void run(CommandLine commandLine) throws Exception { - String logFileName = commandLine.getOptionValue(LOG_FILE_OPTION); + ParserConfig.getGlobalInstance().setAutoTypeSupport(true); + String logFileName = commandLine.getOptionValue(BuilderConstants.LOG_FILE_OPTION); setUpLogFile(logFileName); - long projectId = Long.parseLong(commandLine.getOptionValue(PROJECT_ID_OPTION)); - String jobName = commandLine.getOptionValue(JOB_NAME_OPTION); + long projectId = Long.parseLong(commandLine.getOptionValue(BuilderConstants.PROJECT_ID_OPTION)); + String jobName = commandLine.getOptionValue(BuilderConstants.JOB_NAME_OPTION); - String pipelineStr = commandLine.getOptionValue(PIPELINE_OPTION); - Pipeline pipeline = BuilderJsonUtils.deserialize(pipelineStr, Pipeline.class); + String pipelineStr = commandLine.getOptionValue(BuilderConstants.PIPELINE_OPTION); + Pipeline pipeline = JSONObject.parseObject(pipelineStr, Pipeline.class); - String pythonExec = commandLine.getOptionValue(PYTHON_EXEC_OPTION); - String pythonPaths = commandLine.getOptionValue(PYTHON_PATHS_OPTION); - String schemaUrl = commandLine.getOptionValue(SCHEMA_URL_OPTION); + String pythonExec = commandLine.getOptionValue(BuilderConstants.PYTHON_EXEC_OPTION); + String pythonPaths = commandLine.getOptionValue(BuilderConstants.PYTHON_PATHS_OPTION); + String schemaUrl = 
commandLine.getOptionValue(BuilderConstants.SCHEMA_URL_OPTION); - String parallelismStr = commandLine.getOptionValue(PARALLELISM_OPTION); + String parallelismStr = commandLine.getOptionValue(BuilderConstants.PARALLELISM_OPTION); int parallelism = (parallelismStr == null ? 1 : Integer.parseInt(parallelismStr)); - String alterOperation = commandLine.getOptionValue(ALTER_OPERATION_OPTION); + String modelExecuteNumStr = + commandLine.getOptionValue(BuilderConstants.MODEL_EXECUTE_NUM_OPTION); + Integer modelExecuteNum = + (modelExecuteNumStr == null ? 5 : Integer.parseInt(modelExecuteNumStr)); + + String alterOperation = commandLine.getOptionValue(BuilderConstants.ALTER_OPERATION_OPTION); RecordAlterOperationEnum alterOperationEnum = RecordAlterOperationEnum.valueOf(alterOperation); - boolean enableLeadTo = commandLine.hasOption(LEAD_TO_OPTION); + boolean enableLeadTo = commandLine.hasOption(BuilderConstants.LEAD_TO_OPTION); + + String graphStoreUrl = commandLine.getOptionValue(BuilderConstants.GRAPH_STORE_URL_OPTION); + String searchEngineUrl = commandLine.getOptionValue(BuilderConstants.SEARCH_ENGINE_URL_OPTION); - String graphStoreUrl = commandLine.getOptionValue(GRAPH_STORE_URL_OPTION); - String searchEngineUrl = commandLine.getOptionValue(SEARCH_ENGINE_URL_OPTION); + String project = commandLine.getOptionValue(BuilderConstants.PROJECT_OPTION); ProjectSchema projectSchema = getProjectSchema(projectId, schemaUrl); Map conceptLists = getConceptLists(enableLeadTo, projectSchema); @@ -141,16 +172,22 @@ private static void run(CommandLine commandLine) throws Exception { .setOperation(alterOperationEnum) .setEnableLeadTo(enableLeadTo) .setGraphStoreUrl(graphStoreUrl) - .setSearchEngineUrl(searchEngineUrl); + .setSearchEngineUrl(searchEngineUrl) + .setProject(project) + .setModelExecuteNum(modelExecuteNum) + .setSchemaUrl(schemaUrl); LocalBuilderRunner runner = new LocalBuilderRunner(parallelism); runner.init(pipeline, builderContext); try { runner.execute(); + } catch 
(Exception e) { + throw new RuntimeException("runner execute exception ", e); } finally { runner.close(); } + System.exit(0); } private static ProjectSchema getProjectSchema(long projectId, String schemaUrl) { diff --git a/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/sink/impl/GraphStoreSinkWriter.java b/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/sink/impl/GraphStoreSinkWriter.java index cda8e8490..dfca19bea 100644 --- a/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/sink/impl/GraphStoreSinkWriter.java +++ b/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/sink/impl/GraphStoreSinkWriter.java @@ -45,7 +45,7 @@ public class GraphStoreSinkWriter extends BaseSinkWriter(SPGTypeIdentifier.parse("Text")), SPGTypeEnum.BASIC_TYPE); diff --git a/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/sink/impl/Neo4jSinkWriter.java b/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/sink/impl/Neo4jSinkWriter.java index 4cfff3843..6c38a1e72 100644 --- a/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/sink/impl/Neo4jSinkWriter.java +++ b/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/sink/impl/Neo4jSinkWriter.java @@ -18,7 +18,6 @@ import com.antgroup.openspg.builder.core.runtime.BuilderContext; import com.antgroup.openspg.builder.model.exception.BuilderException; import com.antgroup.openspg.builder.model.pipeline.ExecuteNode; -import com.antgroup.openspg.builder.model.pipeline.Node; import com.antgroup.openspg.builder.model.pipeline.config.Neo4jSinkNodeConfig; import com.antgroup.openspg.builder.model.pipeline.enums.StatusEnum; import com.antgroup.openspg.builder.model.record.BaseRecord; @@ -49,14 +48,13 @@ @Slf4j public class Neo4jSinkWriter extends BaseSinkWriter { - private 
static final int NUM_THREADS = 10; + private static final int NUM_THREADS = 60; - private static final int MAX_NUM_THREADS = 200; - - private ExecuteNode node; + private ExecuteNode node = new ExecuteNode(); private Neo4jStoreClient client; private Project project; private static final String DOT = "."; + private static RejectedExecutionHandler handler = (r, executor) -> { try { @@ -68,10 +66,10 @@ public class Neo4jSinkWriter extends BaseSinkWriter { private static ExecutorService executor = new ThreadPoolExecutor( NUM_THREADS, - MAX_NUM_THREADS, + NUM_THREADS, 2 * 60L, TimeUnit.SECONDS, - new LinkedBlockingQueue<>(200), + new LinkedBlockingQueue<>(100), handler); public Neo4jSinkWriter(String id, String name, Neo4jSinkNodeConfig config) { @@ -82,8 +80,6 @@ public Neo4jSinkWriter(String id, String name, Neo4jSinkNodeConfig config) { public void doInit(BuilderContext context) throws BuilderException { if (context.getExecuteNodes() != null) { this.node = context.getExecuteNodes().get(getId()); - } else { - this.node = new ExecuteNode(new Node(getId(), getName(), getConfig())); } client = new Neo4jStoreClient(context.getGraphStoreUrl()); project = JSON.parseObject(context.getProject(), Project.class); @@ -242,8 +238,8 @@ private void writeEdge(SubGraphRecord.Edge edge) { } log.info( String.format( - "write Edge succeed id:%s cons:%s", - edge.getId(), System.currentTimeMillis() - statr)); + "write Edge succeed from:%s to:%s cons:%s", + edge.getFrom(), edge.getTo(), System.currentTimeMillis() - statr)); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/source/impl/StringSourceReader.java b/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/source/impl/StringSourceReader.java index af99a34ba..464107093 100644 --- 
a/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/source/impl/StringSourceReader.java +++ b/builder/runner/local/src/main/java/com/antgroup/openspg/builder/runner/local/physical/source/impl/StringSourceReader.java @@ -29,7 +29,7 @@ @Slf4j public class StringSourceReader extends BaseSourceReader { - private ExecuteNode node; + private ExecuteNode node = new ExecuteNode(); private String document; diff --git a/cloudext/impl/graph-store/neo4j/src/main/java/com/antgroup/openspg/cloudext/impl/graphstore/neo4j/Neo4jStoreClient.java b/cloudext/impl/graph-store/neo4j/src/main/java/com/antgroup/openspg/cloudext/impl/graphstore/neo4j/Neo4jStoreClient.java index db6cc5e96..c2915a529 100644 --- a/cloudext/impl/graph-store/neo4j/src/main/java/com/antgroup/openspg/cloudext/impl/graphstore/neo4j/Neo4jStoreClient.java +++ b/cloudext/impl/graph-store/neo4j/src/main/java/com/antgroup/openspg/cloudext/impl/graphstore/neo4j/Neo4jStoreClient.java @@ -128,7 +128,6 @@ public boolean batchTransactionalSchemaOperations(List o @Override public void upsertVertex(@NonNull String vertexTypeName, List vertexRecords) throws Exception { - Long statr = System.currentTimeMillis(); if (CollectionUtils.isEmpty(vertexRecords)) { return; } @@ -151,7 +150,6 @@ record -> { } else { dataUtil.upsertNodes(label, propertiesList, Neo4jCommonUtils.ID, extraLabelsInNeo4j); } - log.info(String.format("upsertVertex cons:%s", System.currentTimeMillis() - statr)); } @Override diff --git 
a/cloudext/impl/object-storage/minio/pom.xml b/cloudext/impl/object-storage/minio/pom.xml new file mode 100644 index 000000000..a1afd673e --- /dev/null +++ b/cloudext/impl/object-storage/minio/pom.xml @@ -0,0 +1,41 @@ + + + + 4.0.0 + + com.antgroup.openspg.cloudext + cloudext-parent + 0.0.1-SNAPSHOT + ../../../pom.xml + + + cloudext-impl-objectstorage-minio + + + + com.antgroup.openspg.cloudext + cloudext-interface-object-storage + + + com.alibaba + fastjson + + + io.minio + minio + 8.4.3 + + + diff --git a/cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioClient.java b/cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioClient.java new file mode 100644 index 000000000..118a7f6f0 --- /dev/null +++ b/cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioClient.java @@ -0,0 +1,286 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.cloudext.impl.objectstorage.minio; + +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient; +import com.antgroup.openspg.common.util.StringUtils; +import io.minio.BucketExistsArgs; +import io.minio.GetObjectArgs; +import io.minio.GetPresignedObjectUrlArgs; +import io.minio.ListObjectsArgs; +import io.minio.MakeBucketArgs; +import io.minio.PutObjectArgs; +import io.minio.RemoveObjectArgs; +import io.minio.Result; +import io.minio.StatObjectArgs; +import io.minio.http.Method; +import io.minio.messages.Item; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.Date; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.compress.utils.IOUtils; +import org.springframework.web.util.UriComponents; +import org.springframework.web.util.UriComponentsBuilder; + +@Slf4j +public class MinioClient implements ObjectStorageClient { + + private final io.minio.MinioClient minioClient; + + @Getter private final String connUrl; + + public MinioClient(String connUrl) { + this.connUrl = connUrl; + this.minioClient = initMinioClient(UriComponentsBuilder.fromUriString(connUrl).build()); + } + + private io.minio.MinioClient initMinioClient(UriComponents uriComponents) { + String scheme = uriComponents.getQueryParams().getFirst(MinioConstants.SCHEME); + scheme = StringUtils.isBlank(scheme) ? 
"http" : scheme; + String endpoint = String.format("%s://%s", scheme, uriComponents.getHost()); + if (uriComponents.getPort() > 0) { + endpoint = String.format("%s:%s", endpoint, uriComponents.getPort()); + } + String accessKey = uriComponents.getQueryParams().getFirst(MinioConstants.ACCESS_KEY); + String secretKey = uriComponents.getQueryParams().getFirst(MinioConstants.SECRET_KEY); + io.minio.MinioClient client = + io.minio.MinioClient.builder().endpoint(endpoint).credentials(accessKey, secretKey).build(); + return client; + } + + @Override + public Boolean saveData(String bucketName, byte[] data, String fileKey) { + ByteArrayInputStream inputStream = null; + try { + inputStream = new ByteArrayInputStream(data); + makeBucket(bucketName); + PutObjectArgs putObjectArgs = + PutObjectArgs.builder().bucket(bucketName).object(fileKey).stream( + inputStream, data.length, -1) + .contentType("application/octet-stream") + .build(); + minioClient.putObject(putObjectArgs); + return true; + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio saveData Exception", e); + } finally { + IOUtils.closeQuietly(inputStream); + } + } + + @Override + public byte[] getData(String bucketName, String fileKey) { + try { + GetObjectArgs getObjectArgs = + GetObjectArgs.builder().bucket(bucketName).object(fileKey).build(); + return inputStreamToByteArray(minioClient.getObject(getObjectArgs)); + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio getData Exception", e); + } + } + + public static byte[] inputStreamToByteArray(InputStream inputStream) throws IOException { + try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) { + byte[] buffer = new byte[1024]; + int length; + while ((length = inputStream.read(buffer)) != -1) { + byteArrayOutputStream.write(buffer, 0, length); + } + return byteArrayOutputStream.toByteArray(); + } + } + + @Override + public Boolean saveString(String 
bucketName, String text, String fileKey) { + byte[] data = text.getBytes(StandardCharsets.UTF_8); + return saveData(bucketName, data, fileKey); + } + + @Override + public String getString(String bucketName, String fileKey) { + byte[] data = getData(bucketName, fileKey); + if (data != null) { + return new String(data, StandardCharsets.UTF_8); + } else { + throw new RuntimeException("Data not found."); + } + } + + @Override + public Boolean saveFile(String bucketName, File file, String fileKey) { + InputStream inputStream = null; + try { + inputStream = new FileInputStream(file); + makeBucket(bucketName); + PutObjectArgs putObjectArgs = + PutObjectArgs.builder().bucket(bucketName).object(fileKey).stream( + inputStream, file.length(), -1) + .contentType("application/octet-stream") + .build(); + minioClient.putObject(putObjectArgs); + return true; + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio saveFile Exception", e); + } finally { + IOUtils.closeQuietly(inputStream); + } + } + + @Override + public InputStream downloadFile(String bucketName, String fileKey) { + try { + return minioClient.getObject( + GetObjectArgs.builder().bucket(bucketName).object(fileKey).build()); + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio getObject Exception", e); + } + } + + @Override + public Boolean downloadFile(String bucketName, String fileKey, String directoryPath) { + InputStream stream = downloadFile(bucketName, fileKey); + OutputStream outputStream = null; + try { + String filePathName = directoryPath + File.separator + new File(fileKey).getName(); + File file = new File(filePathName); + if (!file.exists()) { + file.getParentFile().mkdirs(); + file.createNewFile(); + } + outputStream = new FileOutputStream(filePathName); + byte[] buffer = new byte[1024]; + int bytesRead; + while ((bytesRead = stream.read(buffer)) != -1) { + outputStream.write(buffer, 0, bytesRead); + } + return true; + } 
catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio downloadFile Exception", e); + } finally { + IOUtils.closeQuietly(stream); + IOUtils.closeQuietly(outputStream); + } + } + + @Override + public String getUrl(String bucketName, String fileKey, Date expiration) { + try { + GetPresignedObjectUrlArgs args = + GetPresignedObjectUrlArgs.builder() + .method(Method.GET) + .bucket(bucketName) + .object(fileKey) + .expiry((int) ((expiration.getTime() - System.currentTimeMillis()) / 1000)) + .build(); + return minioClient.getPresignedObjectUrl(args); + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio getUrl Exception", e); + } + } + + @Override + public String getUrlWithoutExpiration(String bucketName, String fileKey) { + try { + GetPresignedObjectUrlArgs args = + GetPresignedObjectUrlArgs.builder() + .method(Method.GET) + .bucket(bucketName) + .object(fileKey) + .build(); + return minioClient.getPresignedObjectUrl(args); + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio getUrlWithoutExpiration Exception", e); + } + } + + @Override + public Boolean removeObject(String bucketName, String fileKey) { + try { + RemoveObjectArgs removeObjectArgs = + RemoveObjectArgs.builder().bucket(bucketName).object(fileKey).build(); + minioClient.removeObject(removeObjectArgs); + return true; + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio removeObject Exception", e); + } + } + + @Override + public Boolean removeDirectory(String bucketName, String directoryPath) { + try { + Iterable> objects = + minioClient.listObjects( + ListObjectsArgs.builder() + .bucket(bucketName) + .prefix(directoryPath) + .recursive(true) + .build()); + + for (Result result : objects) { + Item item = result.get(); + log.info("minio Deleting: " + item.objectName()); + if (item.objectName().startsWith(directoryPath)) { + removeObject(bucketName, 
item.objectName()); + } + } + return true; + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio removeDirectory Exception", e); + } + } + + @Override + public Long getContentLength(String bucketName, String objectName) { + try { + makeBucket(bucketName); + StatObjectArgs statObjectArgs = + StatObjectArgs.builder().bucket(bucketName).object(objectName).build(); + return minioClient.statObject(statObjectArgs).size(); + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio getContentLength Exception", e); + } + } + + public void makeBucket(String bucketName) { + try { + boolean isExist = + minioClient.bucketExists(BucketExistsArgs.builder().bucket(bucketName).build()); + if (!isExist) { + minioClient.makeBucket(MakeBucketArgs.builder().bucket(bucketName).build()); + } + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("minio makeBucket Exception", e); + } + } +} diff --git a/cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioClientDriver.java b/cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioClientDriver.java new file mode 100644 index 000000000..1e0761f97 --- /dev/null +++ b/cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioClientDriver.java @@ -0,0 +1,35 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.cloudext.impl.objectstorage.minio; + +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriver; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriverManager; + +public class MinioClientDriver implements ObjectStorageClientDriver { + + static { + ObjectStorageClientDriverManager.registerDriver(new MinioClientDriver()); + } + + @Override + public String driverScheme() { + return "minio"; + } + + @Override + public ObjectStorageClient connect(String url) { + return new MinioClient(url); + } +} diff --git a/cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioConstants.java b/cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioConstants.java new file mode 100644 index 000000000..99b5b5960 --- /dev/null +++ b/cloudext/impl/object-storage/minio/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/minio/MinioConstants.java @@ -0,0 +1,20 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.cloudext.impl.objectstorage.minio; + +public class MinioConstants { + public static final String SCHEME = "scheme"; + public static final String ACCESS_KEY = "accessKey"; + public static final String SECRET_KEY = "secretKey"; +} diff --git a/cloudext/impl/object-storage/oss/pom.xml b/cloudext/impl/object-storage/oss/pom.xml new file mode 100644 index 000000000..8ee9daf16 --- /dev/null +++ b/cloudext/impl/object-storage/oss/pom.xml @@ -0,0 +1,41 @@ + + + + 4.0.0 + + com.antgroup.openspg.cloudext + cloudext-parent + 0.0.1-SNAPSHOT + ../../../pom.xml + + + cloudext-impl-objectstorage-oss + + + + com.antgroup.openspg.cloudext + cloudext-interface-object-storage + + + com.alibaba + fastjson + + + com.aliyun.oss + aliyun-sdk-oss + 3.15.0 + + + diff --git a/cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSClient.java b/cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSClient.java new file mode 100644 index 000000000..28843a755 --- /dev/null +++ b/cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSClient.java @@ -0,0 +1,239 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.cloudext.impl.objectstorage.oss; + +import com.aliyun.oss.ClientBuilderConfiguration; +import com.aliyun.oss.OSS; +import com.aliyun.oss.OSSClientBuilder; +import com.aliyun.oss.model.OSSObject; +import com.aliyun.oss.model.OSSObjectSummary; +import com.aliyun.oss.model.ObjectListing; +import com.aliyun.oss.model.ObjectMetadata; +import com.aliyuncs.utils.IOUtils; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient; +import com.antgroup.openspg.common.util.StringUtils; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.Date; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import org.springframework.web.util.UriComponents; +import org.springframework.web.util.UriComponentsBuilder; + +@Slf4j +public class OSSClient implements ObjectStorageClient { + + private final OSS ossClient; + + @Getter private final String connUrl; + + public OSSClient(String connUrl) { + this.connUrl = connUrl; + this.ossClient = initOSSClient(UriComponentsBuilder.fromUriString(connUrl).build()); + } + + private OSS initOSSClient(UriComponents uriComponents) { + String scheme = uriComponents.getQueryParams().getFirst(OSSConstants.SCHEME); + String endpoint = uriComponents.getHost(); + if (StringUtils.isNotBlank(scheme)) { + endpoint = String.format("%s://%s", scheme, endpoint); + } + if (uriComponents.getPort() > 0) { + endpoint = String.format("%s:%s", endpoint, uriComponents.getPort()); + } + String accessKey = uriComponents.getQueryParams().getFirst(OSSConstants.ACCESS_KEY); + String secretKey = uriComponents.getQueryParams().getFirst(OSSConstants.SECRET_KEY); + String timout = uriComponents.getQueryParams().getFirst(OSSConstants.CONNECTION_TIMEOUT); + 
if (StringUtils.isNotBlank(timout)) { + ClientBuilderConfiguration configuration = new ClientBuilderConfiguration(); + configuration.setConnectionTimeout(Integer.valueOf(timout)); + return new OSSClientBuilder().build(endpoint, accessKey, secretKey, configuration); + } else { + return new OSSClientBuilder().build(endpoint, accessKey, secretKey); + } + } + + @Override + public Boolean saveData(String bucketName, byte[] data, String fileKey) { + try (InputStream inputStream = new ByteArrayInputStream(data)) { + ossClient.putObject(bucketName, fileKey, inputStream); + return true; + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("OSS saveData Exception", e); + } + } + + @Override + public byte[] getData(String bucketName, String fileKey) { + try (OSSObject ossObject = ossClient.getObject(bucketName, fileKey); + InputStream inputStream = ossObject.getObjectContent()) { + return inputStreamToByteArray(inputStream); + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("OSS getData Exception", e); + } + } + + public static byte[] inputStreamToByteArray(InputStream inputStream) throws IOException { + try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream()) { + byte[] buffer = new byte[1024]; + int length; + while ((length = inputStream.read(buffer)) != -1) { + byteArrayOutputStream.write(buffer, 0, length); + } + return byteArrayOutputStream.toByteArray(); + } + } + + @Override + public Boolean saveString(String bucketName, String text, String fileKey) { + byte[] data = text.getBytes(StandardCharsets.UTF_8); + return saveData(bucketName, data, fileKey); + } + + @Override + public String getString(String bucketName, String fileKey) { + byte[] data = getData(bucketName, fileKey); + if (data != null) { + return new String(data, StandardCharsets.UTF_8); + } else { + throw new RuntimeException("Data not found."); + } + } + + @Override + public Boolean saveFile(String bucketName, File file, String fileKey) { 
+ try (InputStream inputStream = new FileInputStream(file)) { + ossClient.putObject(bucketName, fileKey, inputStream); + return true; + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("OSS saveFile Exception", e); + } + } + + @Override + public InputStream downloadFile(String bucketName, String fileKey) { + try { + OSSObject ossObject = ossClient.getObject(bucketName, fileKey); + return ossObject.getObjectContent(); + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("OSS getObject Exception", e); + } + } + + @Override + public Boolean downloadFile(String bucketName, String fileKey, String directoryPath) { + InputStream stream = downloadFile(bucketName, fileKey); + OutputStream outputStream = null; + try { + String filePathName = directoryPath + File.separator + new File(fileKey).getName(); + File file = new File(filePathName); + if (!file.exists()) { + file.getParentFile().mkdirs(); + file.createNewFile(); + } + outputStream = new FileOutputStream(filePathName); + byte[] buffer = new byte[1024]; + int bytesRead; + while ((bytesRead = stream.read(buffer)) != -1) { + outputStream.write(buffer, 0, bytesRead); + } + return true; + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("OSS downloadFile Exception", e); + } finally { + IOUtils.closeQuietly(stream); + IOUtils.closeQuietly(outputStream); + } + } + + @Override + public String getUrl(String bucketName, String fileKey, Date expiration) { + try { + return ossClient.generatePresignedUrl(bucketName, fileKey, expiration).toString(); + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("OSS getUrl Exception", e); + } + } + + @Override + public String getUrlWithoutExpiration(String bucketName, String fileKey) { + try { + return ossClient + .generatePresignedUrl(bucketName, fileKey, new Date(Long.MAX_VALUE)) + .toString(); + } catch (Exception e) { + log.error(e.getMessage(), e); + 
throw new RuntimeException("OSS getUrlWithoutExpiration Exception", e); + } + } + + @Override + public Boolean removeObject(String bucketName, String fileKey) { + try { + ossClient.deleteObject(bucketName, fileKey); + return true; + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("OSS removeObject Exception", e); + } + } + + @Override + public Boolean removeDirectory(String bucketName, String directoryPath) { + try { + ObjectListing objectListing; + String nextMarker = null; + do { + objectListing = ossClient.listObjects(new com.aliyun.oss.model.ListObjectsRequest(bucketName).withPrefix(directoryPath).withMarker(nextMarker)); + for (OSSObjectSummary objectSummary : objectListing.getObjectSummaries()) { + String objectName = objectSummary.getKey(); + log.info("OSS Deleting: " + objectName); + if (objectName.startsWith(directoryPath)) { + removeObject(bucketName, objectName); + } + } + nextMarker = objectListing.getNextMarker(); + } while (objectListing.isTruncated()); + + return true; + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("OSS removeDirectory Exception", e); + } + } + + @Override + public Long getContentLength(String bucketName, String objectName) { + try { + ObjectMetadata metadata = ossClient.getObjectMetadata(bucketName, objectName); + return metadata.getContentLength(); + } catch (Exception e) { + log.error(e.getMessage(), e); + throw new RuntimeException("OSS getContentLength Exception", e); + } + } +} diff --git a/cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSClientDriver.java b/cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSClientDriver.java new file mode 100644 index 000000000..73d5a0d72 --- /dev/null +++ b/cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSClientDriver.java @@ -0,0 +1,35 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file 
except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.cloudext.impl.objectstorage.oss; + +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriver; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriverManager; + +public class OSSClientDriver implements ObjectStorageClientDriver { + + static { + ObjectStorageClientDriverManager.registerDriver(new OSSClientDriver()); + } + + @Override + public String driverScheme() { + return "oss"; + } + + @Override + public ObjectStorageClient connect(String url) { + return new OSSClient(url); + } +} diff --git a/cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSConstants.java b/cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSConstants.java new file mode 100644 index 000000000..62e4a0bf3 --- /dev/null +++ b/cloudext/impl/object-storage/oss/src/main/java/com/antgroup/openspg/cloudext/impl/objectstorage/oss/OSSConstants.java @@ -0,0 +1,21 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.cloudext.impl.objectstorage.oss; + +public class OSSConstants { + public static final String SCHEME = "scheme"; + public static final String ACCESS_KEY = "accessKeyId"; + public static final String SECRET_KEY = "accessKeySecret"; + public static final String CONNECTION_TIMEOUT = "connectionTimeout"; +} diff --git a/cloudext/impl/search-engine/neo4j/src/main/java/com/antgroup/openspg/cloudext/impl/searchengine/neo4j/Neo4jSearchClient.java b/cloudext/impl/search-engine/neo4j/src/main/java/com/antgroup/openspg/cloudext/impl/searchengine/neo4j/Neo4jSearchClient.java index 3dd300458..5191d1132 100644 --- a/cloudext/impl/search-engine/neo4j/src/main/java/com/antgroup/openspg/cloudext/impl/searchengine/neo4j/Neo4jSearchClient.java +++ b/cloudext/impl/search-engine/neo4j/src/main/java/com/antgroup/openspg/cloudext/impl/searchengine/neo4j/Neo4jSearchClient.java @@ -32,12 +32,14 @@ import java.util.List; import java.util.Map; import lombok.Getter; +import lombok.extern.slf4j.Slf4j; import org.neo4j.driver.Driver; import org.neo4j.driver.Record; import org.neo4j.driver.types.Node; import org.springframework.web.util.UriComponents; import org.springframework.web.util.UriComponentsBuilder; +@Slf4j public class Neo4jSearchClient extends BaseIdxSearchEngineClient { private final Neo4jIndexUtils client; @@ -98,7 +100,8 @@ private List doNeo4jSearch(SearchRequest request) { if (query instanceof FullTextSearchQuery) { FullTextSearchQuery q = (FullTextSearchQuery) query; List labelConstraints = q.getLabelConstraints(); - return client.textSearch(q.getQueryString(), labelConstraints, topk, indexName); + return client.textSearch( + q.getQueryString(), labelConstraints, request.getFrom(), topk, indexName); } else if (query instanceof VectorSearchQuery) { VectorSearchQuery q = (VectorSearchQuery) query; return client.vectorSearch( @@ -115,7 +118,7 @@ public List search(SearchRequest request) { List results = new ArrayList<>(); for (Record r : records) 
{ Node node = r.get("node").asNode(); - String docId = node.get(Neo4jCommonUtils.ID).asString(); + String docId = String.valueOf(node.id()); double score = r.get("score").asDouble(); Map fields = new HashMap<>(node.asMap()); ArrayList labels = new ArrayList<>(); diff --git a/cloudext/interface/computing-engine/pom.xml b/cloudext/interface/computing-engine/pom.xml new file mode 100644 index 000000000..1b64b1936 --- /dev/null +++ b/cloudext/interface/computing-engine/pom.xml @@ -0,0 +1,35 @@ + + + + 4.0.0 + + com.antgroup.openspg.cloudext + cloudext-parent + 0.0.1-SNAPSHOT + ../../pom.xml + + + cloudext-interface-computing-engine + + + com.antgroup.openspg + common-util + + + com.antgroup.openspg.builder + builder-model + + + diff --git a/cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineClient.java b/cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineClient.java new file mode 100644 index 000000000..194d331bb --- /dev/null +++ b/cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineClient.java @@ -0,0 +1,28 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.cloudext.interfaces.computingengine; + +import com.antgroup.openspg.cloudext.interfaces.computingengine.model.ComputingStatusEnum; +import com.antgroup.openspg.cloudext.interfaces.computingengine.model.ComputingTask; +import com.antgroup.openspg.common.util.cloudext.CloudExtClient; +import com.antgroup.openspg.server.common.model.bulider.BuilderJob; + +public interface ComputingEngineClient extends CloudExtClient { + + ComputingTask submitBuilderJob(BuilderJob builderJob, T extension); + + ComputingStatusEnum queryStatus(T extension, String id); + + Boolean stop(T extension, String id); +} diff --git a/cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineClientDriver.java b/cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineClientDriver.java new file mode 100644 index 000000000..2282e3e38 --- /dev/null +++ b/cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineClientDriver.java @@ -0,0 +1,18 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
/**
 * SPI marker for computing-engine client drivers, discovered through the cloudext
 * driver-loading mechanism (see {@code ComputingEngineClientDriverManager} and the
 * {@code META-INF/services} registration).
 */
public interface ComputingEngineClientDriver extends CloudExtClientDriver {}
+ */ + +package com.antgroup.openspg.cloudext.interfaces.computingengine; + +import com.antgroup.openspg.common.util.DriverManagerUtils; +import com.antgroup.openspg.server.common.model.exception.CloudExtException; +import java.util.concurrent.CopyOnWriteArrayList; +import lombok.extern.slf4j.Slf4j; +import org.springframework.web.util.UriComponents; +import org.springframework.web.util.UriComponentsBuilder; + +@Slf4j +public class ComputingEngineClientDriverManager { + + private static final CopyOnWriteArrayList registeredDrivers = + new CopyOnWriteArrayList<>(); + + private ComputingEngineClientDriverManager() {} + + static { + DriverManagerUtils.loadDrivers("cloudext.cache.drivers", ComputingEngineClientDriver.class); + log.info("ComputingEngine DriverManager initialized"); + } + + public static synchronized void registerDriver(ComputingEngineClientDriver driver) { + if (driver != null) { + registeredDrivers.addIfAbsent(driver); + } else { + throw new NullPointerException(); + } + log.info("registerDriver: {}", driver); + } + + public static ComputingEngineClient getClient(String connUrl) { + UriComponents uriComponents = UriComponentsBuilder.fromUriString(connUrl).build(); + for (ComputingEngineClientDriver driver : registeredDrivers) { + if (driver.acceptsConfig(uriComponents.getScheme())) { + return driver.connect(connUrl); + } + } + throw CloudExtException.driverNotExist(connUrl); + } +} diff --git a/cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineConstants.java b/cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineConstants.java new file mode 100644 index 000000000..54a19226c --- /dev/null +++ b/cloudext/interface/computing-engine/src/main/java/com/antgroup/openspg/cloudext/interfaces/computingengine/ComputingEngineConstants.java @@ -0,0 +1,18 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the 
/** Shared constant keys for computing-engine configuration. */
public class ComputingEngineConstants {

  /** Config key for the submitting user's identity number. */
  public static final String USER_NUMBER = "userNumber";

  /** Constants holder — not instantiable (FIX: utility class lacked a private constructor). */
  private ComputingEngineConstants() {}
}
/** Lifecycle status of a task on a computing engine. */
public enum ComputingStatusEnum {
  // Submitted to the engine, not yet running.
  SUBMIT,
  RUNNING,
  SUCCESS,
  FAILED,
  // Presumably: stopped on user request — confirm against driver implementations.
  STOP,
  // Presumably: engine has no record of the queried task id.
  NOTFOUND,
  // Engine reported a state that does not map to any of the above.
  UNDEFINED
}
/**
 * Handle returned when a builder job is submitted to a computing engine
 * (see {@code ComputingEngineClient#submitBuilderJob}).
 */
@Getter
@Setter
public class ComputingTask extends BaseValObj {

  private static final long serialVersionUID = 8781414879162199540L;

  // Engine-side task identifier; presumably the id passed to queryStatus/stop — confirm.
  private String taskId;

  // URL where the task's execution log can be viewed.
  private String logUrl;
}
b/cloudext/interface/object-storage/src/main/java/com/antgroup/openspg/cloudext/interfaces/objectstorage/ObjectStorageClient.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.cloudext.interfaces.objectstorage; + +import com.antgroup.openspg.common.util.cloudext.CloudExtClient; +import java.io.File; +import java.io.InputStream; +import java.util.Date; + +public interface ObjectStorageClient extends CloudExtClient { + + Boolean saveData(String bucketName, byte[] data, String fileKey); + + byte[] getData(String bucketName, String fileKey); + + Boolean saveString(String bucketName, String text, String fileKey); + + String getString(String bucketName, String fileKey); + + Boolean saveFile(String bucketName, File file, String fileKey); + + public InputStream downloadFile(String bucketName, String fileKey); + + Boolean downloadFile(String bucketName, String fileKey, String directoryPath); + + String getUrl(String bucketName, String fileKey, Date expiration); + + String getUrlWithoutExpiration(String bucketName, String fileKey); + + Boolean removeObject(String bucketName, String fileKey); + + Boolean removeDirectory(String bucketName, String directoryPath); + + Long getContentLength(String bucketName, String objectName); +} diff --git a/cloudext/interface/object-storage/src/main/java/com/antgroup/openspg/cloudext/interfaces/objectstorage/ObjectStorageClientDriver.java 
/**
 * SPI marker for object-storage client drivers, discovered through the cloudext
 * driver-loading mechanism (see {@code ObjectStorageClientDriverManager} and the
 * {@code META-INF/services} registration for the MinIO and OSS drivers).
 */
public interface ObjectStorageClientDriver extends CloudExtClientDriver {}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.cloudext.interfaces.objectstorage; + +import com.antgroup.openspg.common.util.DriverManagerUtils; +import com.antgroup.openspg.server.common.model.exception.CloudExtException; +import java.util.concurrent.CopyOnWriteArrayList; +import lombok.extern.slf4j.Slf4j; +import org.springframework.web.util.UriComponents; +import org.springframework.web.util.UriComponentsBuilder; + +@Slf4j +public class ObjectStorageClientDriverManager { + + private static final CopyOnWriteArrayList registeredDrivers = + new CopyOnWriteArrayList<>(); + + private ObjectStorageClientDriverManager() {} + + static { + DriverManagerUtils.loadDrivers( + "cloudext.objectstorage.drivers", ObjectStorageClientDriver.class); + log.info("ObjectStorage DriverManager initialized"); + } + + public static synchronized void registerDriver(ObjectStorageClientDriver driver) { + if (driver != null) { + registeredDrivers.addIfAbsent(driver); + } else { + throw new NullPointerException(); + } + log.info("registerDriver: {}", driver); + } + + public static ObjectStorageClient getClient(String connUrl) { + UriComponents uriComponents = UriComponentsBuilder.fromUriString(connUrl).build(); + for (ObjectStorageClientDriver driver : registeredDrivers) { + if (driver.acceptsConfig(uriComponents.getScheme())) { + return driver.connect(connUrl); + } + } + throw CloudExtException.driverNotExist(connUrl); + } +} diff --git a/cloudext/interface/object-storage/src/main/resources/META-INF/services/com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriver 
b/cloudext/interface/object-storage/src/main/resources/META-INF/services/com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriver new file mode 100644 index 000000000..3642a1eef --- /dev/null +++ b/cloudext/interface/object-storage/src/main/resources/META-INF/services/com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriver @@ -0,0 +1,2 @@ +com.antgroup.openspg.cloudext.impl.objectstorage.minio.MinioClientDriver +com.antgroup.openspg.cloudext.impl.objectstorage.oss.OSSClientDriver \ No newline at end of file diff --git a/cloudext/pom.xml b/cloudext/pom.xml index efc50c056..6a8fafd89 100644 --- a/cloudext/pom.xml +++ b/cloudext/pom.xml @@ -38,11 +38,15 @@ interface/graph-store interface/search-engine interface/cache + interface/object-storage + interface/computing-engine impl/graph-store/tugraph impl/graph-store/neo4j impl/search-engine/elasticsearch impl/search-engine/neo4j impl/cache/redis + impl/object-storage/minio + impl/object-storage/oss diff --git a/common/util/pom.xml b/common/util/pom.xml index 1e0e20d17..8543e9b0b 100644 --- a/common/util/pom.xml +++ b/common/util/pom.xml @@ -59,5 +59,17 @@ org.neo4j.driver neo4j-java-driver + + net.sf.dozer + dozer + + + com.alibaba + pemja + + + com.github.rholder + guava-retrying + diff --git a/common/util/src/main/java/com/antgroup/openspg/common/constants/BuilderConstant.java b/common/util/src/main/java/com/antgroup/openspg/common/constants/BuilderConstant.java new file mode 100644 index 000000000..e5d0181a4 --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/constants/BuilderConstant.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
/** String constant keys used across the KAG builder pipeline configuration. */
public class BuilderConstant {

  public static final String PROJECT_ID = "project_id";
  public static final String TOKEN = "token";
  public static final String YU_QUE_CONFIG = "yuqueConfig";
  public static final String SPLIT_CONFIG = "splitConfig";
  public static final String EXTRACT_CONFIG = "extractConfig";
  public static final String SEMANTIC_SPLIT = "semanticSplit";
  public static final String AUTO_WRITE = "autoWrite";
  public static final String SPLIT_LENGTH = "splitLength";
  // PY_-prefixed keys use snake_case because they are consumed by the Python side.
  public static final String PY_SPLIT_LENGTH = "split_length";
  public static final String PY_WINDOW_LENGTH = "window_length";
  public static final String DEFAULT_VERSION = "V3";
  public static final String KAG_WRITER_ASYNC_TASK = "kagWriterAsyncTask";

  public static final String SCANNER = "scanner";
  public static final String READER = "reader";
  public static final String TYPE = "type";
  public static final String ID_COL = "id_col";
  public static final String NAME_COL = "name_col";
  public static final String CONTENT_COL = "content_col";
  public static final String HEADER = "header";
  public static final String COL_NAMES = "col_names";
  public static final String CUT_DEPTH = "cut_depth";
  public static final String LLM = "llm";
  public static final String VECTORIZE_MODEL = "vectorize_model";
  public static final String ID = "id";
  public static final String NAME = "name";
  public static final String CONTENT = "content";
  public static final String DICT = "dict";
  public static final String FILE = "file";
  public static final String SEMANTIC = "semantic";
  public static final String LENGTH = "length";
  public static final String BATCH = "batch";
  public static final String BASE = "base";
  public static final String SCHEMA_FREE = "schema_free";
  // Python component ABC class names.
  public static final String SPLITTER_ABC = "SplitterABC";
  public static final String EXTRACTOR_ABC = "ExtractorABC";
  public static final String VECTORIZER_ABC = "VectorizerABC";
  public static final String POSTPROCESSOR_ABC = "PostProcessorABC";

  // Supported source/document types.
  public static final String YU_QUE = "yuque";
  public static final String TXT = "txt";
  public static final String CSV = "csv";
  public static final String PDF = "pdf";
  public static final String MD = "md";
  public static final String JSON = "json";
  public static final String DOC = "doc";
  public static final String DOCX = "docx";

  /** Constants holder — not instantiable (FIX: utility class lacked a private constructor). */
  private BuilderConstant() {}
}
/** Shared helpers: DB paging validation and builder storage file-key construction. */
public class CommonUtils {

  /** The maximum number of non-paging entries for a DB query. */
  public static final int INNER_QUERY_MAX_COUNT = 10000;

  /**
   * Validates paging parameters for a query.
   *
   * @param count total rows the unpaged query would return
   * @param pageNo page number; {@code null} means "no paging"
   * @param pageSize page size; required (and capped) when {@code pageNo} is present
   * @throws IllegalArgumentException if a limit is exceeded or pageSize is missing
   */
  public static void checkQueryPage(int count, Integer pageNo, Integer pageSize) {
    // pageNo is empty to indicate no paging.
    if (pageNo == null) {
      // If querying everything, the total must stay under the cap to prevent OOM.
      isTrue(
          count <= INNER_QUERY_MAX_COUNT,
          String.format(
              "The current query data volume %s exceeds the maximum limit %s, please use pagination query",
              count, INNER_QUERY_MAX_COUNT));
      return;
    }
    // When pageNo is present, pageSize is mandatory and capped.
    isTrue(pageSize != null, "pageSize cannot be null");
    isTrue(
        pageSize <= INNER_QUERY_MAX_COUNT,
        String.format(
            "The current query data volume %s exceeds the maximum limit %s, please use pagination query",
            pageSize, INNER_QUERY_MAX_COUNT));
  }

  /**
   * Storage key prefix for a builder instance, e.g. {@code builder/project_1/instance_2/}.
   *
   * <p>NOTE(review): uses {@code File.separator}, so keys are backslash-separated on Windows —
   * confirm the object-storage backends expect '/' regardless of platform.
   */
  public static String getInstanceStorageFileKey(Long projectId, Long instanceId) {
    return "builder"
        + File.separator
        + "project_"
        + projectId
        + File.separator
        + "instance_"
        + instanceId
        + File.separator;
  }

  /** Storage key for one task artifact: instance prefix + {@code taskId_type.kag}. */
  public static String getTaskStorageFileKey(
      Long projectId, Long instanceId, Long taskId, String type) {
    return getInstanceStorageFileKey(projectId, instanceId) + taskId + "_" + type + ".kag";
  }

  // Mirrors org.springframework.util.Assert.isTrue without the Spring dependency:
  // throws IllegalArgumentException with the given message when the condition fails.
  private static void isTrue(boolean condition, String message) {
    if (!condition) {
      throw new IllegalArgumentException(message);
    }
  }
}
not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.common.util; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import org.dozer.DozerBeanMapper; +import org.springframework.util.CollectionUtils; + +public class DozerBeanMapperUtil { + + private static final DozerBeanMapper DOZER_BEAN_MAPPER; + + static { + List mappingFileUrls = Collections.singletonList("dozer-custom-convert.xml"); + DOZER_BEAN_MAPPER = new DozerBeanMapper(); + DOZER_BEAN_MAPPER.setMappingFiles(mappingFileUrls); + } + + public static T map(Object source, Class destinationClass) { + T destinationBean = null; + if (source != null) { + destinationBean = DOZER_BEAN_MAPPER.map(source, destinationClass); + } + return destinationBean; + } + + public static List mapList( + @SuppressWarnings("rawtypes") Collection sourceList, Class destinationClass) { + if (CollectionUtils.isEmpty(sourceList)) { + return Collections.emptyList(); + } + + List destinationList = new ArrayList<>(sourceList.size()); + for (Object sourceObject : sourceList) { + T destinationObject = map(sourceObject, destinationClass); + destinationList.add(destinationObject); + } + return destinationList; + } + + private DozerBeanMapperUtil() {} +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/ECBUtil.java b/common/util/src/main/java/com/antgroup/openspg/common/util/ECBUtil.java new file mode 100644 index 000000000..b58bf8870 --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/ECBUtil.java @@ -0,0 +1,85 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under 
the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.common.util; + +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; +import javax.crypto.Cipher; +import javax.crypto.KeyGenerator; +import javax.crypto.SecretKey; +import javax.crypto.spec.SecretKeySpec; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.lang3.StringUtils; + +@Slf4j +public class ECBUtil { + + private static final String KEY_ALGORITHM = "AES"; + + private static final String DEFAULT_CIPHER_ALGORITHM = "AES/ECB/PKCS5Padding"; + + public static String encrypt(String content, String password) { + if (StringUtils.isBlank(content)) { + return content; + } + try { + Cipher cipher = Cipher.getInstance(DEFAULT_CIPHER_ALGORITHM); + byte[] byteContent = content.getBytes("utf-8"); + cipher.init(Cipher.ENCRYPT_MODE, getSecretKey(password)); + byte[] result = cipher.doFinal(byteContent); + return Base64.encodeBase64String(result); + } catch (Exception ex) { + log.error("ecb encrypt error", ex); + throw new RuntimeException("ecb encrypt error", ex); + } + } + + public static String decrypt(String content, String password) { + if (StringUtils.isBlank(content)) { + return content; + } + try { + Cipher cipher = Cipher.getInstance(DEFAULT_CIPHER_ALGORITHM); + cipher.init(Cipher.DECRYPT_MODE, getSecretKey(password)); + byte[] result = cipher.doFinal(Base64.decodeBase64(content)); + return new String(result, "utf-8"); + } catch (Exception ex) { + log.error("ecb decrypt error", ex); + throw new RuntimeException("ecb 
decrypt error", ex); + } + } + + /** + * 生成加密秘钥 + * + * @return + */ + private static SecretKeySpec getSecretKey(final String password) { + if (StringUtils.isBlank(password)) { + return null; + } + KeyGenerator kg; + try { + SecureRandom random = SecureRandom.getInstance("SHA1PRNG"); + random.setSeed(password.getBytes()); + kg = KeyGenerator.getInstance(KEY_ALGORITHM); + kg.init(128, random); + SecretKey secretKey = kg.generateKey(); + return new SecretKeySpec(secretKey.getEncoded(), KEY_ALGORITHM); + } catch (NoSuchAlgorithmException ex) { + log.error("ecb getSecretKey error password:" + password, ex); + throw new RuntimeException("ecb getSecretKey error password:" + password, ex); + } + } +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/PartitionUtils.java b/common/util/src/main/java/com/antgroup/openspg/common/util/PartitionUtils.java new file mode 100644 index 000000000..2725e75c6 --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/PartitionUtils.java @@ -0,0 +1,200 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.common.util; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.springframework.util.Assert; + +public class PartitionUtils { + + public static List analysisPartition(String partitionStr, String bizDate) { + return analysisPartition(partitionStr, bizDate, ","); + } + + public static List analysisPartition( + String partitionStr, String bizDate, String delimiter) { + List partitions = new ArrayList<>(); + if (partitionStr.contains("&")) { + String[] partitionArr = partitionStr.split("&"); + for (String partitionArrStr : partitionArr) { + String partition = replaceDtVariable(partitionArrStr, bizDate, delimiter); + partitions.add(partition); + } + } else if (partitionStr.contains("|")) { + List partition = replaceDtVariableAndMultiValue(partitionStr, bizDate); + String basePartition = partition.get(0); + String multiKey = partition.get(1); + String multiValue = partition.get(2); + String[] multiValueArr = multiValue.split("\\|"); + for (int i = 0; i < multiValueArr.length; i++) { + String tmpPartition = basePartition + "," + multiKey + "=" + multiValueArr[i]; + tmpPartition = replaceDtVariable(tmpPartition, bizDate, delimiter); + partitions.add(tmpPartition); + } + } else { + String partition = replaceDtVariable(partitionStr, bizDate, delimiter); + partitions.add(partition); + } + return partitions; + } + + public static String replaceDtVariable( + String partitionWithVariable, String bizDate, String delimiter) { + PartitionSpec partitionSpec; + try { + partitionSpec = new PartitionSpec(partitionWithVariable); + } catch (IllegalArgumentException e) { + throw new RuntimeException( + String.format("Partition information error:%s", partitionWithVariable)); + } + if (StringUtils.isNotBlank(bizDate)) { + for (String key : partitionSpec.keys()) { + String value = partitionSpec.get(key); + if (value.contains("$")) { + 
partitionSpec.set(key, bizDate); + } + } + } + + return partitionSpec.toString(true, delimiter); + } + + public static PartitionSpec replacePartitionSpec(String partition) { + PartitionSpec partitionSpec; + try { + partitionSpec = new PartitionSpec(partition); + } catch (IllegalArgumentException e) { + throw new RuntimeException(String.format("Partition information error:%s", partition)); + } + return partitionSpec; + } + + public static List replaceDtVariableAndMultiValue( + String partitionWithVariable, String bizDate) { + PartitionSpec partitionSpec; + try { + partitionSpec = new PartitionSpec(partitionWithVariable); + } catch (IllegalArgumentException e) { + throw new RuntimeException( + String.format("Partition information error:%s", partitionWithVariable)); + } + String multiValue = ""; + String multiKey = ""; + if (StringUtils.isNotBlank(bizDate)) { + for (String key : partitionSpec.keys()) { + String value = partitionSpec.get(key); + if (value.contains("$")) { + partitionSpec.set(key, bizDate); + } + if (value.contains("|")) { + multiKey = key; + multiValue = value; + } + } + } + List result = new LinkedList<>(); + result.add(partitionSpec.toString()); + result.add(multiKey); + result.add(multiValue); + return result; + } + + public static String[] getDatabaseAndTable(String sourceId) { + String[] split = StringUtils.split(sourceId, "."); + Assert.isTrue( + split.length == 2, + String.format( + "sourceId must be in the format dbName.tableName,currently sourceId:%s", sourceId)); + return split; + } + + public static class PartitionSpec { + private Map kv = new LinkedHashMap(); + + public PartitionSpec() {} + + public PartitionSpec(String spec) { + if (spec == null) { + throw new IllegalArgumentException("Argument 'spec' cannot be null"); + } else { + String[] groups = spec.split("[,/]"); + String[] var3 = groups; + int var4 = groups.length; + + for (int var5 = 0; var5 < var4; ++var5) { + String group = var3[var5]; + String[] kv = group.split("="); + if 
(kv.length != 2) { + throw new IllegalArgumentException("Invalid partition spec."); + } + + String k = kv[0].trim(); + String v = kv[1].trim().replaceAll("'", "").replaceAll("\"", ""); + if (k.length() == 0 || v.length() == 0) { + throw new IllegalArgumentException("Invalid partition spec."); + } + + this.set(k, v); + } + } + } + + public void set(String key, String value) { + this.kv.put(key, value); + } + + public String get(String key) { + return this.kv.get(key); + } + + public Set keys() { + return this.kv.keySet(); + } + + public boolean isEmpty() { + return this.kv.isEmpty(); + } + + @Override + public String toString() { + return this.toString(true, ","); + } + + public String toString(boolean quote, String delimiter) { + List entries = new LinkedList(); + String[] keys = this.keys().toArray(new String[0]); + String[] var6 = keys; + int var7 = keys.length; + + for (int var8 = 0; var8 < var7; ++var8) { + String key = var6[var8]; + StringBuilder entryBuilder = new StringBuilder(); + entryBuilder.append(key).append("="); + if (quote) { + entryBuilder.append("'").append(this.kv.get(key)).append("'"); + } else { + entryBuilder.append(this.kv.get(key)); + } + + entries.add(entryBuilder.toString()); + } + + return String.join(delimiter, entries); + } + } +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/RetryerUtil.java b/common/util/src/main/java/com/antgroup/openspg/common/util/RetryerUtil.java new file mode 100644 index 000000000..4a276d47e --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/RetryerUtil.java @@ -0,0 +1,77 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.common.util; + +import com.github.rholder.retry.Retryer; +import com.github.rholder.retry.RetryerBuilder; +import com.github.rholder.retry.StopStrategies; +import com.github.rholder.retry.WaitStrategies; +import com.google.common.base.Predicates; +import com.google.common.collect.Maps; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.ReentrantLock; + +public class RetryerUtil { + + public static final String COLON = ":"; + private static final Long DEFAULT_MULTIPLIER = 100L; + private static final Long DEFAULT_MAXIMUM_TIME = 5L; + private static final Integer DEFAULT_ATTEMPT_NUMBER = 10; + + private static Map> retryerMap = Maps.newConcurrentMap(); + + private static ReentrantLock reentrantLock = new ReentrantLock(); + + public static Retryer getRetryer( + Long multiplier, Long maximumTime, Integer attemptNumber) { + if (multiplier == null || multiplier < 0) { + multiplier = DEFAULT_MULTIPLIER; + } + if (maximumTime == null || maximumTime < 0) { + maximumTime = DEFAULT_MAXIMUM_TIME; + } + if (attemptNumber == null || attemptNumber < -1) { + attemptNumber = DEFAULT_ATTEMPT_NUMBER; + } + + String key = multiplier + COLON + maximumTime + COLON + attemptNumber; + Retryer retryer = retryerMap.get(key); + if (retryer == null) { + reentrantLock.lock(); + try { + if ((retryer = retryerMap.get(key)) == null) { + RetryerBuilder retryerBuilder = + RetryerBuilder.newBuilder() + .retryIfException() + .retryIfResult(Predicates.equalTo(false)) + .withWaitStrategy( + WaitStrategies.exponentialWait(multiplier, maximumTime, TimeUnit.SECONDS)); + + if (attemptNumber == -1) { + 
retryerBuilder.withStopStrategy(StopStrategies.neverStop()).build(); + } else { + retryerBuilder.withStopStrategy(StopStrategies.stopAfterAttempt(attemptNumber)); + } + retryer = retryerBuilder.build(); + retryerMap.put(key, retryer); + } + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + reentrantLock.unlock(); + } + } + return retryer; + } +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/StringUtils.java b/common/util/src/main/java/com/antgroup/openspg/common/util/StringUtils.java index 4985f53ea..66101fb91 100644 --- a/common/util/src/main/java/com/antgroup/openspg/common/util/StringUtils.java +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/StringUtils.java @@ -14,18 +14,18 @@ package com.antgroup.openspg.common.util; import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.apache.commons.text.StringSubstitutor; public class StringUtils extends org.apache.commons.lang3.StringUtils { - /** - * 将object转化成string返回,常用于POJO对象未实现toString()场景, - * - * @param object 对象 - * @return 对象string表示 - */ + private static Pattern humpPattern = Pattern.compile("[A-Z]"); + + public static final String UNDERLINE_SEPARATOR = "_"; + public static String toString(Object object) { if (object instanceof String) { return object.toString(); @@ -37,4 +37,14 @@ public static String dictFormat(Map vars, String template) { StringSubstitutor substitutor = new StringSubstitutor(vars, "${", "}"); return substitutor.replace(template); } + + public static String humpToLine(String str) { + Matcher matcher = humpPattern.matcher(str); + StringBuffer sb = new StringBuffer(); + while (matcher.find()) { + matcher.appendReplacement(sb, UNDERLINE_SEPARATOR + matcher.group(0).toLowerCase()); + } + matcher.appendTail(sb); + return sb.toString(); + } } diff --git 
/** Shared constants used across OpenSPG common utilities. */
public class CommonConstant {

  /** Masked placeholder rendered in place of a real password. */
  public static final String DEFAULT_PASSWORD = "******";

  /** Key material for ECB password encryption (see ECBUtil). */
  public static final String ECB_PASSWORD_KEY = "ECB_PASSWORD_O2P0E2N4SPG";

  /** Config key naming a data-warehouse tunnel endpoint — presumably ODPS/MaxCompute; verify. */
  public static final String TUNNEL_ENDPOINT = "TunnelEndpoint";

  private CommonConstant() {
    // constants holder — not instantiable
  }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.common.util.converter; + +import java.lang.reflect.Method; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.dozer.CustomConverter; +import org.dozer.MappingException; + +@Slf4j +public class EnumToStringConverter implements CustomConverter { + + @Override + public Object convert( + Object destination, Object source, Class destinationClass, Class sourceClass) { + if (source == null) { + return null; + } + if (source instanceof Enum) { + return getString(source); + } else if (source instanceof String) { + return getEnum(destinationClass, source.toString()); + } else { + throw new MappingException( + new StringBuilder("Converter ") + .append(this.getClass().getSimpleName()) + .append(" was used incorrectly. 
Arguments were: ") + .append(destinationClass.getClass().getName()) + .append(" and ") + .append(source) + .toString()); + } + } + + private Object getString(Object source) { + Enum em = (Enum) source; + return em.name(); + } + + private Object getEnum(Class destinationClass, String source) { + if (StringUtils.isBlank(source)) { + return null; + } + try { + Method m = destinationClass.getDeclaredMethod("valueOf", String.class); + Object enumeration = m.invoke(destinationClass.getClass(), source); + return enumeration; + } catch (Exception e) { + log.warn("EnumToStringConverter getEnum Exception source:" + source); + } + return null; + } +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateTimeToDateDozerConverter.java b/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateTimeToDateDozerConverter.java new file mode 100644 index 000000000..93e6f5644 --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateTimeToDateDozerConverter.java @@ -0,0 +1,37 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.common.util.converter; + +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.util.Date; +import org.dozer.DozerConverter; + +public class LocalDateTimeToDateDozerConverter extends DozerConverter { + + public LocalDateTimeToDateDozerConverter() { + super(LocalDateTime.class, Date.class); + } + + @Override + public LocalDateTime convertFrom(Date source, LocalDateTime destination) { + return source == null + ? null + : LocalDateTime.ofInstant(source.toInstant(), ZoneId.systemDefault()); + } + + @Override + public Date convertTo(LocalDateTime source, Date destination) { + return source == null ? null : Date.from(source.atZone(ZoneId.systemDefault()).toInstant()); + } +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateTimeToLocalDateTimeDozerConverter.java b/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateTimeToLocalDateTimeDozerConverter.java new file mode 100644 index 000000000..ce15ccc31 --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateTimeToLocalDateTimeDozerConverter.java @@ -0,0 +1,34 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.common.util.converter; + +import java.time.LocalDateTime; +import org.dozer.DozerConverter; + +public class LocalDateTimeToLocalDateTimeDozerConverter + extends DozerConverter { + + public LocalDateTimeToLocalDateTimeDozerConverter() { + super(LocalDateTime.class, LocalDateTime.class); + } + + @Override + public LocalDateTime convertTo(LocalDateTime source, LocalDateTime destination) { + return source; + } + + @Override + public LocalDateTime convertFrom(LocalDateTime source, LocalDateTime destination) { + return source; + } +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateToLocalDateDozerConverter.java b/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateToLocalDateDozerConverter.java new file mode 100644 index 000000000..efdf2232d --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalDateToLocalDateDozerConverter.java @@ -0,0 +1,33 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.common.util.converter; + +import java.time.LocalDate; +import org.dozer.DozerConverter; + +public class LocalDateToLocalDateDozerConverter extends DozerConverter { + + public LocalDateToLocalDateDozerConverter() { + super(LocalDate.class, LocalDate.class); + } + + @Override + public LocalDate convertFrom(LocalDate source, LocalDate destination) { + return source; + } + + @Override + public LocalDate convertTo(LocalDate source, LocalDate destination) { + return source; + } +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalTimeToLocalTimeDozerConverter.java b/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalTimeToLocalTimeDozerConverter.java new file mode 100644 index 000000000..7add2dbee --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/converter/LocalTimeToLocalTimeDozerConverter.java @@ -0,0 +1,33 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.common.util.converter; + +import java.time.LocalTime; +import org.dozer.DozerConverter; + +public class LocalTimeToLocalTimeDozerConverter extends DozerConverter { + + public LocalTimeToLocalTimeDozerConverter() { + super(LocalTime.class, LocalTime.class); + } + + @Override + public LocalTime convertFrom(LocalTime source, LocalTime destination) { + return source; + } + + @Override + public LocalTime convertTo(LocalTime source, LocalTime destination) { + return source; + } +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/neo4j/Neo4jDriverManager.java b/common/util/src/main/java/com/antgroup/openspg/common/util/neo4j/Neo4jDriverManager.java index d28a741e2..ff99552d6 100644 --- a/common/util/src/main/java/com/antgroup/openspg/common/util/neo4j/Neo4jDriverManager.java +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/neo4j/Neo4jDriverManager.java @@ -44,7 +44,7 @@ public static Driver getNeo4jDriver(String uri, String user, String password) { driver = GraphDatabase.driver(uri, AuthTokens.basic(user, password), config); driver.verifyConnectivity(); } catch (Exception e) { - throw new RuntimeException("init Neo4j Client failed :" + uri, e); + throw new RuntimeException("init Neo4j Client failed :" + uri + "," + user, e); } instanceMap.put(uniqueKey, driver); } diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/neo4j/Neo4jIndexUtils.java b/common/util/src/main/java/com/antgroup/openspg/common/util/neo4j/Neo4jIndexUtils.java index 2421167ca..9bbf35f38 100644 --- a/common/util/src/main/java/com/antgroup/openspg/common/util/neo4j/Neo4jIndexUtils.java +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/neo4j/Neo4jIndexUtils.java @@ -257,12 +257,13 @@ public List textSearch( public List textSearch( @NonNull String queryString, @Nullable List labelConstraints, int topk) { - return textSearch(queryString, labelConstraints, topk, null); + return textSearch(queryString, 
labelConstraints, 0, topk, null); } public List textSearch( @NonNull String queryString, @Nullable List labelConstraints, + int page, int topk, @Nullable String indexName) { if (topk != -1 && topk <= 0) @@ -290,8 +291,9 @@ public List textSearch( sb.append(")"); } sb.append("\nRETURN node, score"); - sb.append(String.format("\nLIMIT %d", topk)); + sb.append(String.format("\nSKIP %d LIMIT %d", page, topk)); String query = sb.toString(); + log.info("search:" + query); try (Session session = driver.session(SessionConfig.forDatabase(database))) { return session.readTransaction( tx -> { diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PemjaUtils.java b/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PemjaUtils.java new file mode 100644 index 000000000..0075bfdb1 --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PemjaUtils.java @@ -0,0 +1,87 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.common.util.pemja; + +import com.antgroup.openspg.common.util.Md5Utils; +import com.antgroup.openspg.common.util.pemja.model.PemjaConfig; +import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; +import org.apache.commons.collections4.MapUtils; +import org.apache.commons.lang3.StringUtils; +import pemja.core.PythonInterpreter; +import pemja.core.PythonInterpreterConfig; + +public class PemjaUtils { + + public static Object invoke(PemjaConfig config, Object... 
input) { + String uniqueKey = config.getClassName() + "_" + Md5Utils.md5Of(UUID.randomUUID().toString()); + PythonInterpreter interpreter = null; + try { + interpreter = getPythonInterpreter(config, uniqueKey); + return interpreter.invokeMethod(uniqueKey, config.getMethod(), input); + } finally { + if (interpreter != null) { + interpreter.close(); + } + } + } + + private static PythonInterpreter getPythonInterpreter(PemjaConfig config, String uniqueKey) { + PythonInterpreter interpreter = + newPythonInterpreter(config.getPythonExec(), config.getPythonPaths()); + if (config.getProjectId() != null) { + interpreter.exec("from kag.bridge.spg_server_bridge import init_kag_config"); + interpreter.exec( + String.format( + "init_kag_config(\"%s\",\"%s\")", config.getProjectId(), config.getHostAddr())); + } + interpreter.exec( + String.format("from %s import %s", config.getModulePath(), config.getClassName())); + interpreter.exec( + String.format( + "%s=%s(%s)", + uniqueKey, + config.getClassName(), + paramToPythonString(config.getParams(), config.getParamsPrefix()))); + return interpreter; + } + + public static PythonInterpreter newPythonInterpreter(String pythonExec, String pythonPaths) { + PythonInterpreterConfig.PythonInterpreterConfigBuilder builder = + PythonInterpreterConfig.newBuilder(); + if (StringUtils.isNotBlank(pythonExec)) { + builder.setPythonExec(pythonExec); + } + if (StringUtils.isNotBlank(pythonPaths)) { + String[] pythonPathList = pythonPaths.split(";"); + builder.addPythonPaths(pythonPathList); + } + return new PythonInterpreter(builder.build()); + } + + private static String paramToPythonString(Map params, String paramsPrefix) { + if (MapUtils.isEmpty(params)) { + return ""; + } + if (StringUtils.isBlank(paramsPrefix)) { + paramsPrefix = "**"; + } + String keyValue = + params.entrySet().stream() + .map(entry -> String.format("'%s': '%s'", entry.getKey(), entry.getValue())) + .collect(Collectors.joining(",")); + return String.format("%s{%s}", 
paramsPrefix, keyValue); + } +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PythonInvokeMethod.java b/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PythonInvokeMethod.java new file mode 100644 index 000000000..ccad547ae --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/PythonInvokeMethod.java @@ -0,0 +1,59 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.common.util.pemja; + +public enum PythonInvokeMethod { + BRIDGE_READER("kag.bridge.spg_server_bridge", "SPGServerBridge", "run_reader", ""), + BRIDGE_COMPONENT("kag.bridge.spg_server_bridge", "SPGServerBridge", "run_component", ""), + BATCH_VECTORIZER( + "kag.builder.component.vectorizer.batch_vectorizer", "BatchVectorizer", "_handle", "**"), + LLM_CONFIG_CHECKER("kag.common.llm.llm_config_checker", "LLMConfigChecker", "check", ""), + VECTORIZER_CONFIG_CHECKER( + "kag.common.vectorize_model.vectorize_model_config_checker", + "VectorizeModelConfigChecker", + "check", + ""), + SOLVER_MAIN("kag.solver.main_solver", "SolverMain", "invoke", ""); + + String modulePath; + + String className; + + String method; + + String paramsPrefix; + + PythonInvokeMethod(String modulePath, String className, String method, String paramsPrefix) { + this.modulePath = modulePath; + this.className = className; + this.method = method; + this.paramsPrefix = paramsPrefix; + } + + public String getModulePath() { + return modulePath; + } + + public String getClassName() { + return 
className; + } + + public String getMethod() { + return method; + } + + public String getParamsPrefix() { + return paramsPrefix; + } +} diff --git a/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/model/PemjaConfig.java b/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/model/PemjaConfig.java new file mode 100644 index 000000000..524681f67 --- /dev/null +++ b/common/util/src/main/java/com/antgroup/openspg/common/util/pemja/model/PemjaConfig.java @@ -0,0 +1,84 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.common.util.pemja.model; + +import com.antgroup.openspg.common.util.pemja.PythonInvokeMethod; +import java.util.Map; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +@EqualsAndHashCode(callSuper = false) +public class PemjaConfig { + + private String pythonExec; + + private String pythonPaths; + + private String modulePath; + + private String className; + + private String method; + + private Long projectId; + + private String hostAddr; + + private Map params; + + private String paramsPrefix; + + public PemjaConfig( + String pythonExec, + String pythonPaths, + String hostAddr, + Long projectId, + String modulePath, + String className, + String method, + Map params, + String paramsPrefix) { + this.pythonExec = pythonExec; + this.pythonPaths = pythonPaths; + this.modulePath = modulePath; + this.className = className; + this.method = method; + this.params = params; + this.paramsPrefix = paramsPrefix; + 
this.projectId = projectId; + this.hostAddr = hostAddr; + } + + public PemjaConfig( + String pythonExec, + String pythonPaths, + String hostAddr, + Long projectId, + PythonInvokeMethod pythonInvoke, + Map params) { + this( + pythonExec, + pythonPaths, + hostAddr, + projectId, + pythonInvoke.getModulePath(), + pythonInvoke.getClassName(), + pythonInvoke.getMethod(), + params, + pythonInvoke.getParamsPrefix()); + } +} diff --git a/common/util/src/main/resources/dozer-custom-convert.xml b/common/util/src/main/resources/dozer-custom-convert.xml new file mode 100644 index 000000000..a94a2aeaf --- /dev/null +++ b/common/util/src/main/resources/dozer-custom-convert.xml @@ -0,0 +1,92 @@ + + + + + + + + + java.time.LocalDateTime + java.util.Date + + + java.time.LocalDateTime + java.time.LocalDateTime + + + java.time.LocalTime + java.time.LocalTime + + + java.time.LocalDate + java.time.LocalDate + + + java.lang.Enum + java.lang.String + + + + + com.antgroup.openspg.server.infra.dao.dataobject.SchedulerJobDO + com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob + + extension + extension + + + + com.antgroup.openspg.server.infra.dao.dataobject.SchedulerInstanceDO + com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance + + extension + extension + + + taskDag + taskDag + + + + com.antgroup.openspg.server.infra.dao.dataobject.SchedulerTaskDO + com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask + + extension + extension + + + + com.antgroup.openspg.server.infra.dao.dataobject.SchedulerInfoDO + com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfo + + config + config + + + log + log + + + + com.antgroup.openspg.server.infra.dao.dataobject.DataSourceDO + com.antgroup.openspg.server.common.model.datasource.DataSource + + connectionInfo + connectionInfo + + + \ No newline at end of file diff --git a/dev/release/mysql/buildx-release-mysql.sh b/dev/release/mysql/buildx-release-mysql.sh index 9f1de15f3..ec759a155 
100644 --- a/dev/release/mysql/buildx-release-mysql.sh +++ b/dev/release/mysql/buildx-release-mysql.sh @@ -10,7 +10,7 @@ # or implied. docker buildx build -f Dockerfile --platform linux/arm64/v8,linux/amd64 --push \ - -t spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-mysql:0.5.1 \ + -t spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-mysql:0.6 \ -t spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-mysql:latest \ -t openspg/openspg-mysql:0.5.1 \ -t openspg/openspg-mysql:latest \ diff --git a/dev/release/mysql/sql/initdb.sql b/dev/release/mysql/sql/initdb.sql index 9d3b39640..cc38fe5b5 100644 --- a/dev/release/mysql/sql/initdb.sql +++ b/dev/release/mysql/sql/initdb.sql @@ -75,9 +75,127 @@ CREATE TABLE `kg_builder_job` ( `status` varchar(32) DEFAULT NULL COMMENT '状态', `type` varchar(32) DEFAULT NULL COMMENT '类型', `extension` longtext DEFAULT NULL COMMENT '扩展信息', + `version` varchar(64) DEFAULT NULL COMMENT '版本号', + `life_cycle` varchar(64) DEFAULT NULL COMMENT '执行周期类型', + `action` varchar(64) DEFAULT NULL COMMENT '数据操作类型', + `computing_conf` longtext DEFAULT NULL COMMENT '计算引擎配置', PRIMARY KEY (`id`), KEY `idx_project_id` (`project_id`), KEY `idx_task_id` (`task_id`) ) DEFAULT CHARSET=utf8mb4 COMMENT='图谱构建任务表'; +CREATE TABLE `kg_scheduler_job` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键', + `project_id` bigint(20) unsigned NOT NULL COMMENT '项目ID', + `gmt_create` timestamp NOT NULL DEFAULT current_timestamp() COMMENT '创建时间', + `gmt_modified` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp() COMMENT '修改时间', + `create_user` varchar(32) DEFAULT NULL COMMENT '创建人', + `modify_user` varchar(32) DEFAULT NULL COMMENT '修改人', + `name` varchar(64) NOT NULL COMMENT '任务名称', + `life_cycle` varchar(64) NOT NULL COMMENT '调度周期类型', + `translate_type` varchar(64) NOT NULL COMMENT '任务转换类型', + `status` varchar(64) NOT NULL COMMENT '状态', + `dependence` varchar(64) NOT NULL COMMENT '前置依赖', + `scheduler_cron` varchar(128) DEFAULT 
NULL COMMENT '调度周期cron表达式', + `last_execute_time` timestamp NULL COMMENT '最后一次执行时间', + `invoker_id` bigint(20) unsigned DEFAULT NULL COMMENT '调用者id', + `extension` longtext DEFAULT NULL COMMENT '扩展信息', + `version` varchar(64) DEFAULT NULL COMMENT '版本号', + PRIMARY KEY (`id`), + KEY `idx_project_id` (`project_id`), + KEY `idx_projcet_create_user_name` (`project_id`,`create_user`) +) DEFAULT CHARSET=utf8mb4 COMMENT='调度任务表'; + +CREATE TABLE `kg_scheduler_instance` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键', + `project_id` bigint(20) unsigned NOT NULL COMMENT '项目ID', + `gmt_create` timestamp NOT NULL DEFAULT current_timestamp() COMMENT '创建时间', + `gmt_modified` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp() COMMENT '修改时间', + `create_user` varchar(32) DEFAULT NULL COMMENT '创建人', + `modify_user` varchar(32) DEFAULT NULL COMMENT '修改人', + `unique_id` varchar(128) NOT NULL COMMENT '调度实例唯一id', + `job_id` bigint(20) unsigned NOT NULL COMMENT '调度任务ID', + `type` varchar(64) NOT NULL COMMENT '实例类型', + `status` varchar(64) NOT NULL COMMENT '实例状态', + `progress` bigint(20) unsigned DEFAULT 0 COMMENT '进度', + `begin_running_time` timestamp NULL COMMENT '实例开始时间', + `finish_time` timestamp NULL COMMENT '实例完成时间', + `life_cycle` varchar(64) NOT NULL COMMENT '调度周期类型', + `dependence` varchar(64) NOT NULL COMMENT '前置依赖', + `scheduler_date` timestamp NULL COMMENT '调度执行时间', + `version` varchar(64) DEFAULT NULL COMMENT '版本号', + `extension` longtext DEFAULT NULL COMMENT '扩展信息', + `task_dag` longtext DEFAULT NULL COMMENT '示例调度DAG', + PRIMARY KEY (`id`), + KEY `idx_project_id` (`project_id`), + KEY `idx_job_id` (`job_id`), + UNIQUE KEY `uk_unique_id` (`unique_id`) +) DEFAULT CHARSET=utf8mb4 COMMENT='调度实例表'; + + +CREATE TABLE `kg_scheduler_task` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键', + `project_id` bigint(20) unsigned NOT NULL COMMENT '项目ID', + `gmt_create` timestamp NOT NULL DEFAULT current_timestamp() COMMENT '创建时间', 
+ `gmt_modified` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp() COMMENT '修改时间', + `job_id` bigint(20) unsigned NOT NULL COMMENT '调度任务ID', + `instance_id` bigint(20) unsigned NOT NULL COMMENT '调度示例ID', + `type` varchar(64) NOT NULL COMMENT '类型', + `status` varchar(64) NOT NULL COMMENT '实例状态', + `title` varchar(128) NOT NULL COMMENT '节点标题', + `execute_num` bigint(20) unsigned DEFAULT 0 COMMENT '执行次数', + `begin_time` timestamp NULL COMMENT '开始执行时间', + `finish_time` timestamp NULL COMMENT '执行完成时间', + `estimate_finish_time` timestamp NULL COMMENT '预估完成时间', + `trace_log` longtext DEFAULT NULL COMMENT '执行日志', + `lock_time` timestamp NULL COMMENT '抢锁时间', + `resource` varchar(10240) DEFAULT NULL COMMENT '资源标记', + `input` longtext DEFAULT NULL COMMENT '输入信息', + `output` longtext DEFAULT NULL COMMENT '输出信息', + `node_id` varchar(64) NOT NULL COMMENT '节点id', + `extension` longtext DEFAULT NULL COMMENT '扩展信息', + PRIMARY KEY (`id`), + KEY `idx_project_id` (`project_id`), + KEY `idx_job_id` (`job_id`), + KEY `idx_instance_id` (`instance_id`), + KEY `idx_type_status` (`type`,`status`), + UNIQUE KEY `uk_instance_node_id` (`instance_id`,`node_id`) +) DEFAULT CHARSET=utf8mb4 COMMENT='调度作业节点表'; + + +CREATE TABLE `kg_scheduler_info` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键', + `gmt_create` timestamp NOT NULL DEFAULT current_timestamp() COMMENT '创建时间', + `gmt_modified` timestamp NOT NULL DEFAULT current_timestamp() ON UPDATE current_timestamp() COMMENT '修改时间', + `name` varchar(64) NOT NULL COMMENT '调度任务名称', + `status` varchar(32) DEFAULT NULL COMMENT '状态', + `period` bigint(20) DEFAULT 300 COMMENT '调度间隔,单位秒', + `count` bigint(20) unsigned DEFAULT 0 COMMENT '失败次数', + `log` longtext DEFAULT NULL COMMENT '日志内容', + `config` longtext DEFAULT NULL COMMENT '配置信息', + `lock_time` timestamp NULL COMMENT '抢锁时间', + PRIMARY KEY (`id`), + UNIQUE KEY `uk_name` (`name`) +) DEFAULT CHARSET=utf8mb4 COMMENT='调度任务记录表'; + +CREATE TABLE `kg_data_source`( + 
`id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键', + `gmt_create` timestamp NOT NULL DEFAULT current_timestamp() COMMENT '创建时间', + `gmt_modified` timestamp NOT NULL DEFAULT current_timestamp() on update current_timestamp () COMMENT '修改时间', + `create_user` varchar(64) NOT NULL DEFAULT 'system' COMMENT '创建用户', + `update_user` varchar(64) NOT NULL DEFAULT 'system' COMMENT '修改用户', + `status` varchar(64) NOT NULL DEFAULT 'ENABLE' COMMENT '状态', + `remark` varchar(1024) DEFAULT NULL COMMENT '描述', + `type` varchar(64) NOT NULL DEFAULT 'MYSQL' COMMENT '数据源类型', + `db_name` varchar(256) NOT NULL COMMENT '数据源名称', + `db_url` varchar(1024) NOT NULL COMMENT '数据库url', + `db_user` varchar(128) DEFAULT NULL COMMENT '数据源用户名', + `db_password` varchar(128) DEFAULT NULL COMMENT '数据源密码', + `db_driver_name` varchar(128) DEFAULT NULL COMMENT '数据源驱动', + `category` varchar(64) DEFAULT NULL COMMENT '数据源类别', + `connection_info` longtext DEFAULT NULL COMMENT '连接配置信息', +PRIMARY KEY (`id`), +UNIQUE KEY `uk_db_name` (`db_name`) +) DEFAULT CHARSET=utf8mb4 COMMENT='数据源管理表'; + diff --git a/dev/release/mysql/sql/openspg-initdb.sql b/dev/release/mysql/sql/openspg-initdb.sql index a91678ccd..9cdac1fcb 100644 --- a/dev/release/mysql/sql/openspg-initdb.sql +++ b/dev/release/mysql/sql/openspg-initdb.sql @@ -233,6 +233,74 @@ CREATE TABLE `kg_ontology_ext` ( UNIQUE KEY `uk_id_type_field`(`resource_id`, `resource_type`, `ext_type`, `field`) ) AUTO_INCREMENT = 1 DEFAULT CHARSET = utf8mb4 COMMENT = 'schema的扩展属性'; +CREATE TABLE `kg_user` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键', + `gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `gmt_modified` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', + `user_no` varchar(255) NOT NULL COMMENT '用户工号', + `token` varchar(255) NOT NULL COMMENT 'token', + `last_token` varchar(255) DEFAULT NULL COMMENT '修改前token', + `salt` varchar(255) NOT NULL COMMENT '随机字符串', + 
`gmt_last_token_disable` timestamp NULL DEFAULT NULL COMMENT 'token修改时间', + `dw_access_id` varchar(32) DEFAULT NULL COMMENT '数仓用户ID', + `dw_access_key` varchar(64) DEFAULT NULL COMMENT '数仓用户密钥', + `real_name` varchar(50) DEFAULT NULL COMMENT '用户真名', + `nick_name` varchar(50) DEFAULT NULL COMMENT '用户花名', + `email` varchar(64) DEFAULT NULL COMMENT '用户邮箱', + `domain_account` varchar(64) DEFAULT NULL COMMENT '用户域账号', + `mobile` varchar(64) DEFAULT NULL COMMENT '用户手机号', + `wx_account` varchar(64) DEFAULT NULL COMMENT '用户微信账号', + `config` text DEFAULT NULL COMMENT '配置,json', + PRIMARY KEY(`id`), + UNIQUE KEY `uk_userNo`(`user_no`), + UNIQUE KEY `uk_token`(`token`), + UNIQUE KEY `uk_domain_account`(`domain_account`) +) DEFAULT CHARSET = utf8mb4 COMMENT = '用户管理表'; + +CREATE TABLE `kg_config` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键', + `gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `gmt_modified` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', + `project_id` varchar(64) NOT NULL COMMENT '项目id,可以为某个域的唯一值', + `config_name` varchar(64) NOT NULL COMMENT '配置名称', + `config_id` varchar(128) NOT NULL COMMENT '配置id', + `version` varchar(64) NOT NULL DEFAULT '1' COMMENT '配置版本', + `config` longtext NOT NULL COMMENT '配置,json', + `status` int(11) NOT NULL DEFAULT '1' COMMENT '状态,1有效', + `description` varchar(1024) DEFAULT NULL COMMENT '版本描述', + `resource_id` varchar(128) DEFAULT NULL COMMENT '资源id,用于外键关联schem视图', + `resource_type` varchar(128) DEFAULT 'CONFIG' COMMENT '资源类型', + `user_no` varchar(64) NOT NULL COMMENT '创建者', + PRIMARY KEY(`id`), + UNIQUE KEY `uk_configidversion`(`config_id`, `version`), + KEY `idx_projectid`(`project_id`) +) DEFAULT CHARSET = utf8mb4 COMMENT = '图谱配置表'; + +CREATE TABLE `kg_resource_permission` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键', + `gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `gmt_modified` timestamp 
NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', + `user_no` varchar(255) NOT NULL COMMENT '用户工号', + `resource_id` bigint(20) NOT NULL COMMENT '资源id', + `role_id` bigint(20) NOT NULL COMMENT '角色id', + `resource_tag` varchar(50) NOT NULL DEFAULT 'TYPE' COMMENT '资源分类', + `status` varchar(2) NOT NULL DEFAULT '99' COMMENT '状态。-1:驳回;99:审批中;1:有效;9:删除', + `expire_date` date DEFAULT NULL COMMENT '过期日期', + PRIMARY KEY(`id`), + UNIQUE KEY `uk_unique`(`user_no`, `resource_id`, `resource_tag`), + KEY `idx_resource`(`resource_id`, `role_id`) +) DEFAULT CHARSET = utf8mb4 COMMENT = '资源权限表'; + +CREATE TABLE `kg_role` ( + `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键,角色id', + `gmt_create` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', + `gmt_modified` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '修改时间', + `role_name` varchar(255) NOT NULL COMMENT '角色名', + `permission_detail` text DEFAULT NULL COMMENT '角色权限具体信息,json格式', + PRIMARY KEY(`id`), + UNIQUE KEY `uk_role_name`(`role_name`) +) DEFAULT CHARSET = utf8mb4 COMMENT = '平台角色表'; + INSERT INTO kg_biz_domain (`id`,`gmt_create`,`gmt_modified`,`name`,`status`,`description`,`global_config`) VALUES(1,'2023-09-01 00:00:00','2023-09-01 00:00:00','defaultTenant','VALID','',null); INSERT INTO kg_ontology_entity (`id`,`original_id`,`name`,`name_zh`,`entity_category`,`layer`,`description`,`description_zh`,`status`,`with_index`,`scope`,`version`,`version_status`,`gmt_create`,`gmt_modified`,`transformer_id`,`operator_config`,`config`,`unique_name`) VALUES(1,1,'Thing','事物','ADVANCED','EXTENSION','Base class for all schema types, all of which inherit the type either directly or indirectly','所有schema类型的基类,它们都直接或者间接继承该类型','1','TRUE','PUBLIC',44,'ONLINE','2023-09-01 00:00:00','2023-09-01 00:00:00',0,null,null,'Thing'); @@ -258,3 +326,15 @@ INSERT INTO kg_ontology_entity_parent (`id`,`entity_id`,`parent_id`,`status`,`gm INSERT INTO 
kg_ontology_entity_property_range (`id`,`domain_id`,`property_name`,`range_id`,`property_name_zh`,`constraint_id`,`property_category`,`map_type`,`version`,`status`,`gmt_create`,`gmt_modified`,`original_id`,`store_property_name`,`transformer_id`,`property_desc`,`property_desc_zh`,`project_id`,`original_domain_id`,`original_range_id`,`version_status`,`relation_source`,`direction`,`mask_type`,`multiver_config`,`property_source`,`property_config`) VALUES(1,1,'description',2,'描述',0,'BASIC','TYPE',44,'1','2022-03-21 19:24:54','2023-08-27 09:39:04',1,'description',0,null,null,0,1,2,'ONLINE',null,null,null,null,null,null); INSERT INTO kg_ontology_entity_property_range (`id`,`domain_id`,`property_name`,`range_id`,`property_name_zh`,`constraint_id`,`property_category`,`map_type`,`version`,`status`,`gmt_create`,`gmt_modified`,`original_id`,`store_property_name`,`transformer_id`,`property_desc`,`property_desc_zh`,`project_id`,`original_domain_id`,`original_range_id`,`version_status`,`relation_source`,`direction`,`mask_type`,`multiver_config`,`property_source`,`property_config`) VALUES(2,1,'id',2,'实体主键',0,'BASIC','TYPE',44,'1','2022-03-21 19:24:54','2023-08-27 09:39:04',2,'id',0,null,null,0,1,2,'ONLINE',null,null,null,null,null,null); INSERT INTO kg_ontology_entity_property_range (`id`,`domain_id`,`property_name`,`range_id`,`property_name_zh`,`constraint_id`,`property_category`,`map_type`,`version`,`status`,`gmt_create`,`gmt_modified`,`original_id`,`store_property_name`,`transformer_id`,`property_desc`,`property_desc_zh`,`project_id`,`original_domain_id`,`original_range_id`,`version_status`,`relation_source`,`direction`,`mask_type`,`multiver_config`,`property_source`,`property_config`) VALUES(3,1,'name',2,'名称',0,'BASIC','TYPE',44,'1','2022-03-21 19:24:54','2023-08-27 09:39:04',3,'name',0,null,null,0,1,2,'ONLINE',null,null,null,null,null,null); + +INSERT INTO kg_user 
(`gmt_create`,`gmt_modified`,`user_no`,`token`,`last_token`,`salt`,`gmt_last_token_disable`,`dw_access_id`,`dw_access_key`,`real_name`,`nick_name`,`email`,`domain_account`,`mobile`,`wx_account`,`config`) VALUES(now(),now(),'openspg','075Df6275475a739',null,'Ktu4O',null,null,'efea9c06f9a581fe392bab2ee9a0508b2878f958c1f422f8080999e7dc024b83','openspg','openspg',null,'openspg',null,null,'{"useCurrentLanguage":"zh-CN"}'); + +INSERT INTO kg_role (`id`,`gmt_create`,`gmt_modified`,`role_name`,`permission_detail`) VALUES(1,now(),now(),'SUPER',''); +INSERT INTO kg_role (`id`,`gmt_create`,`gmt_modified`,`role_name`,`permission_detail`) VALUES(2,now(),now(),'OWNER',''); +INSERT INTO kg_role (`id`,`gmt_create`,`gmt_modified`,`role_name`,`permission_detail`) VALUES(3,now(),now(),'MEMBER',''); + +INSERT INTO kg_resource_permission (`gmt_create`,`gmt_modified`,`user_no`,`resource_id`,`role_id`,`resource_tag`,`status`,`expire_date`) VALUES(now(),now(),'openspg',0,1,'PLATFORM','1',null); + +INSERT INTO kg_config (`id`,`gmt_create`,`gmt_modified`,`project_id`,`config_name`,`config_id`,`version`,`config`,`status`,`description`,`resource_id`,`resource_type`,`user_no`) VALUES(1,now(),now(),'0','KAG Support Model','KAG_SUPPORT_MODEL','1','[{"id":1,"vendor":"vllm","logo":"/img/logo/vllm.png","params":[{"ename":"base_url","cname":"base_url","required":true,"defaultValue":""},{"ename":"model","cname":"model","required":true,"defaultValue":""},{"ename":"desc","cname":"desc","required":true,"formProps":{"allowClear":true,"placeholder":"Please enter remarks for partitioning."}}]},{"id":2,"vendor":"maas","logo":"/img/logo/maas.png","params":[{"ename":"base_url","cname":"base_url","required":true,"defaultValue":""},{"ename":"api_key","cname":"api_key 
","required":true,"defaultValue":""},{"ename":"model","cname":"model","required":true,"defaultValue":""},{"ename":"temperature","cname":"temperature","required":true,"formType":"number","defaultValue":0.7},{"ename":"stream","cname":"stream","required":true,"defaultValue":"False"},{"ename":"desc","cname":"desc","required":true,"formProps":{"allowClear":true,"placeholder":"Please enter remarks for partitioning."}}]},{"id":3,"vendor":"Ollama","logo":"/img/logo/ollama.png","params":[{"ename":"base_url","cname":"base_url","required":true,"defaultValue":""},{"ename":"model","cname":"model","required":true,"defaultValue":""},{"ename":"desc","cname":"desc","required":true,"formProps":{"allowClear":true,"placeholder":"Please enter remarks for partitioning."}}]}]',1,null,null,'SYSTEM_CONFIG',''); +INSERT INTO kg_config (`id`,`gmt_create`,`gmt_modified`,`project_id`,`config_name`,`config_id`,`version`,`config`,`status`,`description`,`resource_id`,`resource_type`,`user_no`) VALUES(2,now(),now(),'0','Global Configuration','KAG_CONFIG','1','',1,null,null,'CONFIG','admin'); +INSERT INTO kg_config (`id`,`gmt_create`,`gmt_modified`,`project_id`,`config_name`,`config_id`,`version`,`config`,`status`,`description`,`resource_id`,`resource_type`,`user_no`) VALUES(3,now(),now(),'0','KAG Environment 
Configuration','KAG_ENV','1','{"configTitle":{"graph_store":{"id":1,"title":[{"ename":"database","cname":"database","required":true,"defaultValue":"kag","formProps":{"disabled":true}},{"ename":"password","cname":"password","required":true,"defaultValue":""},{"ename":"uri","cname":"uri","required":true,"defaultValue":""},{"ename":"user","cname":"user","required":true,"defaultValue":""}]},"vectorizer":{"id":2,"title":[{"ename":"type","cname":"type","required":true,"defaultValue":"openai","formProps":{"disabled":true}},{"ename":"model","cname":"model","required":true,"defaultValue":""},{"ename":"base_url","cname":"base_url","required":true,"defaultValue":""},{"ename":"api_key","cname":"api_key","required":true,"defaultValue":""}]},"prompt":{"id":3,"title":[{"ename":"biz_scene","cname":"biz_scene","required":true,"defaultValue":""},{"ename":"language","cname":"language","required":true,"defaultValue":""}]}}}',1,null,null,'SYSTEM_CONFIG',''); diff --git a/dev/release/python/Dockerfile b/dev/release/python/Dockerfile index c480e88a8..68bfd5bd8 100644 --- a/dev/release/python/Dockerfile +++ b/dev/release/python/Dockerfile @@ -9,7 +9,7 @@ # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. -FROM m.daocloud.io/ubuntu:20.04 +FROM docker.m.daocloud.io/ubuntu:20.04 # For apt to be noninteractive ENV DEBIAN_FRONTEND noninteractive @@ -44,10 +44,9 @@ RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \ RUN python3 -m venv /openspg_venv && \ . /openspg_venv/bin/activate && \ export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-$(dpkg --print-architecture) && \ - pip3 install openspg-kag==0.5.1b3 && \ + pip3 install openspg-kag==0.6 && \ pip3 install pemja==0.4.0 && \ - pip3 install -U "http://alps-common.oss-cn-hangzhou-zmf.aliyuncs.com/nscommon/shiji/nscommon-0.0.1.tar.gz" &&\ + pip3 install -U "http://alps-common.oss-cn-hangzhou-zmf.aliyuncs.com/nscommon/shiji/nscommon-0.0.1.tar.gz" && \ echo "if (tty -s); then \n . 
/openspg_venv/bin/activate \nfi" >> ~/.bashrc - -COPY openspg/ /openspg \ No newline at end of file +ADD openspg/dev/release/python/lib/builder*.jar /openspg_venv/lib/python3.8/site-packages/knext/builder/lib \ No newline at end of file diff --git a/dev/release/python/build-release-python-aliyun.sh b/dev/release/python/build-release-python-aliyun.sh index 221db7beb..cd9637343 100644 --- a/dev/release/python/build-release-python-aliyun.sh +++ b/dev/release/python/build-release-python-aliyun.sh @@ -10,7 +10,9 @@ # or implied. IMAGE="spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-python" -VERSION="0.5.1" +VERSION="0.6" +LATEST="latest" + cd ../../../../ docker build -f openspg/dev/release/python/Dockerfile --platform linux/arm64/v8 --push \ -t ${IMAGE}:${VERSION}-arm64 \ diff --git a/dev/release/server/buildx-release-server.sh b/dev/release/server/buildx-release-server.sh index b17ded4f4..96bbd4815 100644 --- a/dev/release/server/buildx-release-server.sh +++ b/dev/release/server/buildx-release-server.sh @@ -10,6 +10,6 @@ # or implied. docker buildx build -f Dockerfile --platform linux/arm64/v8,linux/amd64 --push \ - -t spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-server:0.5.1 \ + -t spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-server:0.6 \ -t spg-registry.cn-hangzhou.cr.aliyuncs.com/spg/openspg-server:latest \ . 
diff --git a/pom.xml b/pom.xml index 31cbafc4a..59e0be774 100644 --- a/pom.xml +++ b/pom.xml @@ -135,6 +135,16 @@ cloudext-interface-cache ${project.version} + + com.antgroup.openspg.cloudext + cloudext-interface-object-storage + ${project.version} + + + com.antgroup.openspg.cloudext + cloudext-interface-computing-engine + ${project.version} + com.antgroup.openspg.cloudext cloudext-impl-graph-store-tugraph @@ -160,6 +170,16 @@ cloudext-impl-cache-redis ${project.version} + + com.antgroup.openspg.cloudext + cloudext-impl-objectstorage-minio + ${project.version} + + + com.antgroup.openspg.cloudext + cloudext-impl-objectstorage-oss + ${project.version} + @@ -357,6 +377,27 @@ spring-web 5.3.25 + + net.sf.dozer + dozer + 5.4.0 + + + org.slf4j + slf4j-log4j12 + + + + + jakarta.annotation + jakarta.annotation-api + 1.3.5 + + + com.github.rholder + guava-retrying + 2.0.0 + @@ -572,12 +613,6 @@ true - - - **/*.py - - - **/*.yaml diff --git a/reasoner/common/src/main/java/com/antgroup/openspg/reasoner/common/constants/Constants.java b/reasoner/common/src/main/java/com/antgroup/openspg/reasoner/common/constants/Constants.java index 14f68e0b2..c1771352b 100644 --- a/reasoner/common/src/main/java/com/antgroup/openspg/reasoner/common/constants/Constants.java +++ b/reasoner/common/src/main/java/com/antgroup/openspg/reasoner/common/constants/Constants.java @@ -84,6 +84,10 @@ public class Constants { public static final Set CONCEPT_HYPERNYM_EDGE_TYPE_SET = Sets.newHashSet("isA", "locateAt"); + /** open hypernym predicate type */ + public static final Set OPEN_CONCEPT_HYPERNYM_PREDICATE = + Sets.newHashSet("isA", "locateAt", "mannerOf"); + public static final String CONCEPT_EDGE_EXPAND_FUNC_NAME = "concept_edge_expand"; /** allow throw exception in udf */ diff --git a/reasoner/runner/local-runner/src/main/java/com/antgroup/openspg/reasoner/runner/local/rdg/LocalRDG.java b/reasoner/runner/local-runner/src/main/java/com/antgroup/openspg/reasoner/runner/local/rdg/LocalRDG.java index 
0cdb4e28e..167c719bb 100644 --- a/reasoner/runner/local-runner/src/main/java/com/antgroup/openspg/reasoner/runner/local/rdg/LocalRDG.java +++ b/reasoner/runner/local-runner/src/main/java/com/antgroup/openspg/reasoner/runner/local/rdg/LocalRDG.java @@ -206,7 +206,6 @@ private java.util.Map getStartIdWithHitRuleValue( sb.append(field._1).append(".").append(prop).append("=").append(values.get(prop)); } } - System.out.println(sb.toString()); java.util.List> startVertexList = kgGraph.getVertex(this.curRdgStartVertexAlias); for (IVertex startId : startVertexList) { diff --git a/reasoner/runner/runner-common/src/main/java/com/antgroup/openspg/reasoner/rdg/common/LinkEdgeImpl.java b/reasoner/runner/runner-common/src/main/java/com/antgroup/openspg/reasoner/rdg/common/LinkEdgeImpl.java index d9a3948cc..fbe42158f 100644 --- a/reasoner/runner/runner-common/src/main/java/com/antgroup/openspg/reasoner/rdg/common/LinkEdgeImpl.java +++ b/reasoner/runner/runner-common/src/main/java/com/antgroup/openspg/reasoner/rdg/common/LinkEdgeImpl.java @@ -14,6 +14,7 @@ package com.antgroup.openspg.reasoner.rdg.common; import com.antgroup.openspg.reasoner.common.constants.Constants; +import com.antgroup.openspg.reasoner.common.graph.edge.Direction; import com.antgroup.openspg.reasoner.common.graph.edge.IEdge; import com.antgroup.openspg.reasoner.common.graph.edge.impl.Edge; import com.antgroup.openspg.reasoner.common.graph.property.IProperty; @@ -151,6 +152,11 @@ public List> link(KgGraph kgGraph) { genTargetVertexTypes = Lists.newArrayList(linkedUdtfResult.getTargetVertexTypeList().get(i)); } + Direction direction = Direction.OUT; + if (CollectionUtils.isNotEmpty(linkedUdtfResult.getDirection()) + && i < linkedUdtfResult.getDirection().size()) { + direction = Direction.valueOf(linkedUdtfResult.getDirection().get(i)); + } if (genTargetVertexTypes.size() == 0) { throw new RuntimeException( "Linked edge target vertex type must contains at least one type"); @@ -171,20 +177,30 @@ public List> 
link(KgGraph kgGraph) { newVertexSet.add(new Vertex<>(targetId, vertexProperty)); Map props = new HashMap<>(linkedUdtfResult.getEdgePropertyMap()); - props.put(Constants.EDGE_TO_ID_KEY, targetIdStr); + Object from_id = sourceVertex.getValue().get(Constants.NODE_ID_KEY); + Object to_id = targetIdStr; + if (Objects.equals(direction, Direction.IN)) { + to_id = from_id; + from_id = targetIdStr; + } + props.put(Constants.EDGE_TO_ID_KEY, to_id); if (sourceVertex.getValue().isKeyExist(Constants.NODE_ID_KEY)) { - props.put( - Constants.EDGE_FROM_ID_KEY, sourceVertex.getValue().get(Constants.NODE_ID_KEY)); + props.put(Constants.EDGE_FROM_ID_KEY, from_id); } IProperty property = new EdgeProperty(props); // construct new edge - IEdge linkedEdge = new Edge<>(sourceId, targetId, property); + IEdge linkedEdge = + new Edge<>(sourceId, targetId, property, direction); + String edgeType = StringUtils.isNotEmpty(linkedUdtfResult.getEdgeType()) ? linkedUdtfResult.getEdgeType() : linkedEdgePattern.edge().funcName(); linkedEdge.setType(sourceId.getType() + "_" + edgeType + "_" + targetVertexType); + if (Objects.equals(direction, Direction.IN)) { + linkedEdge.setType(targetVertexType + "_" + edgeType + "_" + sourceId.getType()); + } String edgeAlias = pc.alias(); Set> newEdgeSet = diff --git a/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/builtin/udf/JsonStringGet.java b/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/builtin/udf/JsonStringGet.java index 35445ca03..37c032311 100644 --- a/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/builtin/udf/JsonStringGet.java +++ b/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/builtin/udf/JsonStringGet.java @@ -17,8 +17,6 @@ import com.alibaba.fastjson.JSONPath; import com.alibaba.fastjson.JSONValidator; import com.antgroup.openspg.reasoner.udf.model.UdfDefine; -import java.util.ArrayList; -import java.util.List; import java.util.Map; public class JsonStringGet { @@ -66,7 +64,6 @@ public 
Object jsonStrGet(String plainJson, String jsonPath) { public Object getRdfProperty(Object properties, String propKey) { if (properties instanceof Map) { Map objectMap = (Map) properties; - List jsonStrList = new ArrayList<>(); for (String key : objectMap.keySet()) { if (!key.contains("basicInfo")) { continue; diff --git a/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/model/LinkedUdtfResult.java b/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/model/LinkedUdtfResult.java index 742800c6c..4f463047a 100644 --- a/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/model/LinkedUdtfResult.java +++ b/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/model/LinkedUdtfResult.java @@ -28,6 +28,9 @@ public class LinkedUdtfResult { /** The target vertex id of linked edge */ private List targetVertexIdList = new ArrayList<>(); + /** The linked direction */ + private List direction = new ArrayList<>(); + /** The target vertex type of linked edge */ private List targetVertexTypeList = new ArrayList<>(); diff --git a/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/utils/DateUtils.java b/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/utils/DateUtils.java index 7657f098b..24d34434f 100644 --- a/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/utils/DateUtils.java +++ b/reasoner/udf/src/main/java/com/antgroup/openspg/reasoner/udf/utils/DateUtils.java @@ -84,4 +84,10 @@ public static long str2MillSecond(String date, String format) { throw new RuntimeException("date parse error...", e); } } + + /** convert date to string */ + public static String format(Date date, String pattern) { + SimpleDateFormat simpleDateFormat = getSimpleDateFormat(pattern); + return simpleDateFormat.format(date); + } } diff --git a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/Paged.java b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/Paged.java index fd215dad2..a22c259c7 
100644 --- a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/Paged.java +++ b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/Paged.java @@ -31,4 +31,11 @@ public class Paged implements Serializable { public Long totalPageNum() { return (total / pageSize) + ((total % pageSize == 0) ? 0 : 1); } + + public Paged() {} + + public Paged(Integer pageSize, Integer pageIdx) { + this.pageSize = pageSize; + this.pageIdx = pageIdx; + } } diff --git a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ConfigRequest.java b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ConfigRequest.java new file mode 100644 index 000000000..e4f558b69 --- /dev/null +++ b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ConfigRequest.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.api.facade.dto.common.request; + +import com.antgroup.openspg.server.common.model.base.BaseRequest; +import lombok.Data; + +@Data +public class ConfigRequest extends BaseRequest { + + private Long id; + + private String configName; + + private String configId; + + private String version; + + private String config; +} diff --git a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/DataQueryRequest.java b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/DataQueryRequest.java new file mode 100644 index 000000000..4a8425f2b --- /dev/null +++ b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/DataQueryRequest.java @@ -0,0 +1,29 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.api.facade.dto.common.request; + +import com.antgroup.openspg.server.common.model.base.BaseRequest; +import java.util.HashMap; +import java.util.Map; +import lombok.Data; + +@Data +public class DataQueryRequest extends BaseRequest { + private Long sessionId; + private Long projectId; + private String document; + private String instruction; + private String type; + private Map params = new HashMap<>(); +} diff --git a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/DataReasonerRequest.java b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/DataReasonerRequest.java new file mode 100644 index 000000000..3c4a619f7 --- /dev/null +++ b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/DataReasonerRequest.java @@ -0,0 +1,26 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.api.facade.dto.common.request; + +import java.util.HashMap; +import java.util.Map; +import lombok.Data; + +@Data +public class DataReasonerRequest { + private Long projectId; + private String dsl; + private Map params = new HashMap<>(); + private String type; + private String label; +} diff --git a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/PermissionRequest.java b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/PermissionRequest.java new file mode 100644 index 000000000..d5797a975 --- /dev/null +++ b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/PermissionRequest.java @@ -0,0 +1,32 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.api.facade.dto.common.request; + +import com.antgroup.openspg.server.common.model.base.BaseRequest; +import java.util.List; +import lombok.Data; + +@Data +public class PermissionRequest extends BaseRequest { + + private Long id; + + private String resourceTag; + + private String roleType; + + private List userNos; + + private List resourceIds; +} diff --git a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ProjectCreateRequest.java b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ProjectCreateRequest.java index b11f716e8..62f304f09 100644 --- a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ProjectCreateRequest.java +++ b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ProjectCreateRequest.java @@ -14,14 +14,16 @@ package com.antgroup.openspg.server.api.facade.dto.common.request; import com.antgroup.openspg.server.common.model.base.BaseRequest; +import lombok.Data; +@Data public class ProjectCreateRequest extends BaseRequest { private Long id; private String name; - private String desc; + private String description; private String namespace; @@ -30,60 +32,4 @@ public class ProjectCreateRequest extends BaseRequest { private String config; private Boolean autoSchema; - - public Long getId() { - return id; - } - - public void setId(Long id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getDesc() { - return desc; - } - - public void setDesc(String desc) { - this.desc = desc; - } - - public String getNamespace() { - return namespace; - } - - public void setNamespace(String namespace) { - this.namespace = namespace; - } - - public Long getTenantId() { - return tenantId; - } - - public void setTenantId(Long tenantId) { - this.tenantId = tenantId; - } - - 
public String getConfig() { - return config; - } - - public void setConfig(String config) { - this.config = config; - } - - public Boolean getAutoSchema() { - return autoSchema; - } - - public void setAutoSchema(Boolean autoSchema) { - this.autoSchema = autoSchema; - } } diff --git a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ProjectQueryRequest.java b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ProjectQueryRequest.java index f125c0d18..d21b04316 100644 --- a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ProjectQueryRequest.java +++ b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/common/request/ProjectQueryRequest.java @@ -14,9 +14,12 @@ package com.antgroup.openspg.server.api.facade.dto.common.request; import com.antgroup.openspg.server.common.model.base.BaseRequest; +import java.util.List; public class ProjectQueryRequest extends BaseRequest { + private Long id; + private Long tenantId; private Long projectId; @@ -25,8 +28,12 @@ public class ProjectQueryRequest extends BaseRequest { private String namespace; + private String config; + private Boolean orderByGmtCreateDesc; + private List projectIdList; + public Long getTenantId() { return tenantId; } @@ -70,4 +77,28 @@ public Boolean getOrderByGmtCreateDesc() { public void setOrderByGmtCreateDesc(Boolean orderByGmtCreateDesc) { this.orderByGmtCreateDesc = orderByGmtCreateDesc; } + + public List getProjectIdList() { + return projectIdList; + } + + public void setProjectIdList(List projectIdList) { + this.projectIdList = projectIdList; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getConfig() { + return config; + } + + public void setConfig(String config) { + this.config = config; + } } diff --git 
a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/service/request/TextSearchRequest.java b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/service/request/TextSearchRequest.java index 8b686214e..0d52f06be 100644 --- a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/service/request/TextSearchRequest.java +++ b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/service/request/TextSearchRequest.java @@ -21,5 +21,6 @@ public class TextSearchRequest { private Long projectId; private String queryString; private Set labelConstraints; + private Integer page; private Integer topk; } diff --git a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/service/request/WriterGraphRequest.java b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/service/request/WriterGraphRequest.java index 9403d6a67..cb2a6042e 100644 --- a/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/service/request/WriterGraphRequest.java +++ b/server/api/facade/src/main/java/com/antgroup/openspg/server/api/facade/dto/service/request/WriterGraphRequest.java @@ -29,14 +29,17 @@ public class WriterGraphRequest extends BaseRequest { Boolean enableLeadTo; + String token; + public WriterGraphRequest() {} public WriterGraphRequest( - SubGraph subGraph, String operation, Long projectId, Boolean enableLeadTo) { + SubGraph subGraph, String operation, Long projectId, Boolean enableLeadTo, String token) { this.subGraph = subGraph; this.operation = operation; this.projectId = projectId; this.enableLeadTo = enableLeadTo; + this.token = token; } public SubGraph getSubGraph() { @@ -70,4 +73,12 @@ public Boolean getEnableLeadTo() { public void setEnableLeadTo(Boolean enableLeadTo) { this.enableLeadTo = enableLeadTo; } + + public String getToken() { + return token; + } + + public void setToken(String token) { + this.token = token; + } } diff --git 
a/server/api/http-client/pom.xml b/server/api/http-client/pom.xml index b61875c08..58232efdf 100644 --- a/server/api/http-client/pom.xml +++ b/server/api/http-client/pom.xml @@ -32,5 +32,11 @@ com.dtflys.forest forest-core + + javax.servlet + javax.servlet-api + 3.1.0 + compile + diff --git a/server/api/http-client/src/main/java/com/antgroup/openspg/server/api/http/client/account/AccountService.java b/server/api/http-client/src/main/java/com/antgroup/openspg/server/api/http/client/account/AccountService.java new file mode 100644 index 000000000..b292a4127 --- /dev/null +++ b/server/api/http-client/src/main/java/com/antgroup/openspg/server/api/http/client/account/AccountService.java @@ -0,0 +1,115 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.api.http.client.account; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.account.Account; +import java.io.IOException; +import java.util.List; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +/** get account from hr interface(ant) */ +public interface AccountService { + + /** + * get a login user from buc + * + * @return + */ + Account getLoginUser(); + + /** + * get accounts by part of account info + * + * @param keyword + * @return + */ + List getAccountByKeyword(String keyword); + + /** + * get account info by userNo + * + * @param userNo + * @return + */ + Account getByUserNo(String userNo); + + /** + * get account info by userNo with private info + * + * @param userNo + * @return + */ + Account getWithPrivateByUserNo(String userNo); + + /** + * create a new account + * + * @param account + * @return + */ + Integer create(Account account); + + /** + * update password + * + * @param account + * @return + */ + Integer updatePassword(Account account); + + /** + * delete account + * + * @param workNo + * @return + */ + Integer deleteAccount(String workNo); + + /** + * get account list + * + * @param account + * @param page + * @param size + * @return + */ + Paged getAccountList(String account, Integer page, Integer size); + + /** + * get sha256Hex password + * + * @param password + * @param salt + * @return + */ + String getSha256HexPassword(String password, String salt); + + Account getCurrentAccount(HttpServletRequest request) throws IOException; + + boolean login(HttpServletRequest request, HttpServletResponse response, Account account); + + String logout( + HttpServletRequest request, HttpServletResponse response, String workNo, String redirectUrl); + + /** + * update user config + * + * @param userNo + * @param config + * @return + */ + int updateUserConfig(String userNo, String config); +} diff --git 
a/server/api/http-server/pom.xml b/server/api/http-server/pom.xml index 0cd684e12..aa43acc29 100644 --- a/server/api/http-server/pom.xml +++ b/server/api/http-server/pom.xml @@ -56,5 +56,9 @@ com.antgroup.openspg.builder builder-runner-local + + com.antgroup.openspg.server + core-scheduler-service + diff --git a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/HttpBizTemplate.java b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/HttpBizTemplate.java index c52c8833f..61c26aefd 100644 --- a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/HttpBizTemplate.java +++ b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/HttpBizTemplate.java @@ -18,6 +18,7 @@ import com.antgroup.openspg.server.biz.common.util.BizThreadLocal; import com.antgroup.openspg.server.common.model.exception.IllegalParamsException; import com.antgroup.openspg.server.common.model.exception.OpenSPGException; +import com.antgroup.openspgapp.common.util.utils.exception.SpgAppException; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.springframework.http.HttpHeaders; @@ -76,6 +77,9 @@ public static HttpResult execute2(HttpBizCallback callback) { callback.check(); T result = callback.action(); httpResult = HttpResult.success(result); + } catch (SpgAppException e) { + log.error("execute http spg app exception", e); + httpResult = HttpResult.failed(e.getCode(), e.getMessage()); } catch (IllegalParamsException e) { log.error("error http illegal params", e); httpResult = HttpResult.failed("illegal params", e.getMessage()); diff --git a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/DataSourceController.java b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/DataSourceController.java new file mode 100644 index 000000000..832469bee --- /dev/null +++ 
b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/DataSourceController.java @@ -0,0 +1,259 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.api.http.server.openapi; + +import com.alibaba.fastjson.JSON; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.api.http.server.HttpBizCallback; +import com.antgroup.openspg.server.api.http.server.HttpBizTemplate; +import com.antgroup.openspg.server.api.http.server.HttpResult; +import com.antgroup.openspg.server.biz.common.util.AssertUtils; +import com.antgroup.openspg.server.common.model.CommonEnum; +import com.antgroup.openspg.server.common.model.datasource.Column; +import com.antgroup.openspg.server.common.model.datasource.DataSource; +import com.antgroup.openspg.server.common.model.datasource.DataSourceQuery; +import com.antgroup.openspg.server.common.service.datasource.DataSourceService; +import com.google.common.collect.Lists; +import java.util.List; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Controller; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.ResponseBody; + +@Controller +@RequestMapping("/public/v1/datasource") +@Slf4j +public class DataSourceController { + + @Autowired 
private DataSourceService dataSourceService; + + @RequestMapping(value = "/insert", method = RequestMethod.POST) + @ResponseBody + public HttpResult insert(@RequestBody DataSource request) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/datasource/insert request: {}", JSON.toJSONString(request)); + AssertUtils.assertParamObjectIsNotNull("request", request); + AssertUtils.assertParamObjectIsNotNull("name", request.getDbName()); + AssertUtils.assertParamObjectIsNotNull("url", request.getDbUrl()); + } + + @Override + public Boolean action() { + return dataSourceService.insert(request) > 0; + } + }); + } + + @RequestMapping(value = "/update", method = RequestMethod.POST) + @ResponseBody + public HttpResult update(@RequestBody DataSource request) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/datasource/update request: {}", JSON.toJSONString(request)); + AssertUtils.assertParamObjectIsNotNull("request", request); + AssertUtils.assertParamObjectIsNotNull("id", request.getId()); + AssertUtils.assertParamObjectIsNotNull("name", request.getDbName()); + AssertUtils.assertParamObjectIsNotNull("url", request.getDbUrl()); + } + + @Override + public Boolean action() { + return dataSourceService.update(request) > 0; + } + }); + } + + @RequestMapping(value = "/delete", method = RequestMethod.GET) + @ResponseBody + public HttpResult delete(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/datasource/delete id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public Boolean action() { + return dataSourceService.deleteById(id) > 0; + } + }); + } + + @RequestMapping(value = "/getById", method = RequestMethod.GET) + @ResponseBody + public HttpResult getById(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void 
check() { + log.info("/datasource/getById id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public DataSource action() { + return dataSourceService.getById(id); + } + }); + } + + @RequestMapping(value = "/search", method = RequestMethod.POST) + @ResponseBody + public HttpResult> search(@RequestBody DataSourceQuery request) { + return HttpBizTemplate.execute2( + new HttpBizCallback>() { + @Override + public void check() { + log.info("/datasource/search request: {}", JSON.toJSONString(request)); + } + + @Override + public Paged action() { + return dataSourceService.query(request); + } + }); + } + + @RequestMapping(value = "/getAllDatabase", method = RequestMethod.GET) + @ResponseBody + public HttpResult> getAllDatabase(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback>() { + @Override + public void check() { + log.info("/datasource/getAllDatabase id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public List action() { + return dataSourceService.getAllDatabase(id); + } + }); + } + + @RequestMapping(value = "/getAllTable", method = RequestMethod.GET) + @ResponseBody + public HttpResult> getAllTable(Long id, String dbName, String keyword) { + return HttpBizTemplate.execute2( + new HttpBizCallback>() { + @Override + public void check() { + log.info("/datasource/getAllDatabase id: {} dbName: {}", id, dbName); + AssertUtils.assertParamObjectIsNotNull("id", id); + AssertUtils.assertParamObjectIsNotNull("dbName", dbName); + } + + @Override + public List action() { + return dataSourceService.getAllTable(id, dbName, keyword); + } + }); + } + + @RequestMapping(value = "/getTableDetail", method = RequestMethod.GET) + @ResponseBody + public HttpResult> getTableDetail(Long id, String dbName, String tableName) { + return HttpBizTemplate.execute2( + new HttpBizCallback>() { + @Override + public void check() { + log.info( + "/datasource/getAllDatabase id: {} dbName: {} tableName: {}", + id, + 
dbName, + tableName); + AssertUtils.assertParamObjectIsNotNull("id", id); + AssertUtils.assertParamObjectIsNotNull("dbName", dbName); + AssertUtils.assertParamObjectIsNotNull("tableName", tableName); + } + + @Override + public List action() { + return dataSourceService.getTableDetail(id, dbName, tableName); + } + }); + } + + @RequestMapping(value = "/testConnect", method = RequestMethod.POST) + @ResponseBody + public HttpResult testConnect(@RequestBody DataSource request) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/datasource/testConnect request: {}", JSON.toJSONString(request)); + } + + @Override + public Boolean action() { + return dataSourceService.testConnect(request); + } + }); + } + + @RequestMapping(value = "/getDataSourceType", method = RequestMethod.GET) + @ResponseBody + public HttpResult> getDataSourceType(String category) { + return HttpBizTemplate.execute2( + new HttpBizCallback>() { + @Override + public void check() { + log.info("/datasource/getDataSourceType category: {}", category); + } + + @Override + public List action() { + return dataSourceService.getDataSourceType(category); + } + }); + } + + @RequestMapping(value = "/getDataSourceGroupByType", method = RequestMethod.POST) + @ResponseBody + public HttpResult> getDataSourceGroupByType(@RequestBody DataSourceQuery request) { + return HttpBizTemplate.execute2( + new HttpBizCallback>() { + @Override + public void check() { + log.info("/datasource/getDataSourceGroupByType request: {}", request); + } + + @Override + public List action() { + List types = Lists.newArrayList(); + List allType = dataSourceService.getGroupByType(request); + for (DataSource dataSource : allType) { + CommonEnum.DataSourceType type = dataSource.getType(); + if (type == null) { + continue; + } + types.add(new Column(type.name(), type.getCategory().name(), type.getName())); + } + + return types; + } + }); + } +} diff --git 
a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/GraphController.java b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/GraphController.java index ffe5681dc..548a15922 100644 --- a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/GraphController.java +++ b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/GraphController.java @@ -24,6 +24,7 @@ import com.antgroup.openspg.builder.model.record.SubGraphRecord; import com.antgroup.openspg.builder.runner.local.physical.sink.impl.GraphStoreSinkWriter; import com.antgroup.openspg.builder.runner.local.physical.sink.impl.Neo4jSinkWriter; +import com.antgroup.openspg.common.util.StringUtils; import com.antgroup.openspg.core.schema.model.identifier.SPGTypeIdentifier; import com.antgroup.openspg.core.schema.model.type.BaseSPGType; import com.antgroup.openspg.core.schema.model.type.ConceptList; @@ -44,6 +45,7 @@ import java.util.List; import java.util.Map; import java.util.UUID; +import lombok.extern.slf4j.Slf4j; import org.apache.commons.collections4.CollectionUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; @@ -56,6 +58,7 @@ @Controller @RequestMapping("/public/v1/graph") +@Slf4j public class GraphController { @Value("${python.exec:}") @@ -64,8 +67,11 @@ public class GraphController { @Value("${python.paths:}") private String pythonPaths; - @Value("${python.knext.path:}") - private String pythonKnextPath; + @Value("${provision.server:true}") + private Boolean provisionServer; + + @Value("${provision.server.token:}") + private String provisionServerToken; @Autowired private GraphManager graphManager; @@ -166,6 +172,19 @@ public HttpResult writerGraph(@RequestBody WriterGraphRequest request) new HttpBizCallback() { @Override public void check() { + log.info( + "/public/v1/graph/writerGraph 
request: {} provisionServerToken: {}", + JSON.toJSONString(request), + provisionServerToken); + if (provisionServer != null && !provisionServer) { + if (StringUtils.isBlank(provisionServerToken) + || StringUtils.isBlank(request.getToken())) { + throw new RuntimeException("No service provided"); + } + if (!request.getToken().startsWith(provisionServerToken)) { + throw new RuntimeException("Token Authentication failure: " + request.getToken()); + } + } AssertUtils.assertParamObjectIsNotNull("request", request); AssertUtils.assertParamObjectIsNotNull("projectId", request.getProjectId()); AssertUtils.assertParamObjectIsNotNull("operation", request.getOperation()); @@ -189,7 +208,6 @@ public Boolean action() { .setCatalog(new DefaultBuilderCatalog(projectSchema, conceptLists)) .setPythonExec(pythonExec) .setPythonPaths(pythonPaths) - .setPythonKnextPath(pythonKnextPath) .setOperation(RecordAlterOperationEnum.valueOf(request.getOperation())) .setEnableLeadTo(enableLeadTo) .setProject(JSON.toJSONString(projectManager.queryById(request.getProjectId()))) diff --git a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/ProjectController.java b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/ProjectController.java index 251e31292..9230e7d0b 100644 --- a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/ProjectController.java +++ b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/ProjectController.java @@ -13,15 +13,20 @@ package com.antgroup.openspg.server.api.http.server.openapi; +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; import com.antgroup.openspg.server.api.facade.dto.common.request.ProjectCreateRequest; import com.antgroup.openspg.server.api.facade.dto.common.request.ProjectQueryRequest; import com.antgroup.openspg.server.api.facade.dto.schema.request.SchemaAlterRequest; import 
com.antgroup.openspg.server.api.http.server.BaseController; import com.antgroup.openspg.server.api.http.server.HttpBizCallback; import com.antgroup.openspg.server.api.http.server.HttpBizTemplate; +import com.antgroup.openspg.server.biz.common.ConfigManager; import com.antgroup.openspg.server.biz.common.ProjectManager; import com.antgroup.openspg.server.common.model.project.Project; +import com.antgroup.openspgapp.common.util.utils.SpgAppConstant; import java.util.List; +import org.apache.commons.compress.utils.Lists; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; @@ -38,6 +43,8 @@ public class ProjectController extends BaseController { @Autowired private ProjectManager projectManager; + @Autowired private ConfigManager configManager; + @RequestMapping(method = RequestMethod.POST) public ResponseEntity create(@RequestBody ProjectCreateRequest request) { return HttpBizTemplate.execute( @@ -74,7 +81,31 @@ public List action() { ProjectQueryRequest request = new ProjectQueryRequest(); request.setTenantId(tenantId); request.setProjectId(projectId); - return projectManager.query(request); + List projectList = projectManager.query(request); + List newProjectList = Lists.newArrayList(); + projectList.forEach( + project -> { + String config = project.getConfig(); + JSONObject configJson = JSON.parseObject(config); + if (configJson != null) { + configManager.backwardCompatible(configJson); + JSONObject vectorizer = + configManager.clearRedundantField( + configJson.getJSONObject(SpgAppConstant.VECTORIZER), + SpgAppConstant.VECTORIZER); + configJson.put(SpgAppConstant.VECTORIZER, vectorizer); + config = configJson.toJSONString(); + } + newProjectList.add( + new Project( + project.getId(), + project.getName(), + project.getDescription(), + project.getNamespace(), + project.getTenantId(), + config)); + }); + return newProjectList; } }); } diff --git 
a/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/SchedulerController.java b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/SchedulerController.java new file mode 100644 index 000000000..c28b57a56 --- /dev/null +++ b/server/api/http-server/src/main/java/com/antgroup/openspg/server/api/http/server/openapi/SchedulerController.java @@ -0,0 +1,316 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.api.http.server.openapi; + +import com.alibaba.fastjson.JSON; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.api.http.server.HttpBizCallback; +import com.antgroup.openspg.server.api.http.server.HttpBizTemplate; +import com.antgroup.openspg.server.api.http.server.HttpResult; +import com.antgroup.openspg.server.biz.common.util.AssertUtils; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerTaskQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; +import com.antgroup.openspg.server.core.scheduler.service.api.SchedulerService; +import lombok.extern.slf4j.Slf4j; +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Controller; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.ResponseBody; + +@Controller +@RequestMapping("/public/v1/scheduler") +@Slf4j +public class SchedulerController { + + @Autowired private SchedulerService schedulerService; + + @RequestMapping(value = "/job/submit", method = RequestMethod.POST) + @ResponseBody + public HttpResult submitJob(@RequestBody SchedulerJob request) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/insert request: {}", JSON.toJSONString(request)); + AssertUtils.assertParamObjectIsNotNull("request", request); + AssertUtils.assertParamObjectIsNotNull("projectId", request.getProjectId()); + AssertUtils.assertParamObjectIsNotNull("name", request.getName()); + } + + @Override + public SchedulerJob action() { + return schedulerService.submitJob(request); + } + }); + } + + @RequestMapping(value = "/job/execute", method = RequestMethod.GET) + @ResponseBody + public HttpResult executeJob(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/executeJob id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public Boolean action() { + return schedulerService.executeJob(id); + } + }); + } + + @RequestMapping(value = "/job/enable", method = RequestMethod.GET) + @ResponseBody + public HttpResult enableJob(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/enableJob id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public Boolean action() { + return 
schedulerService.enableJob(id); + } + }); + } + + @RequestMapping(value = "/job/disable", method = RequestMethod.GET) + @ResponseBody + public HttpResult disableJob(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/disableJob id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public Boolean action() { + return schedulerService.disableJob(id); + } + }); + } + + @RequestMapping(value = "/job/delete", method = RequestMethod.GET) + @ResponseBody + public HttpResult deleteJob(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/deleteJob id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public Boolean action() { + return schedulerService.deleteJob(id); + } + }); + } + + @RequestMapping(value = "/job/update", method = RequestMethod.POST) + @ResponseBody + public HttpResult updateJob(@RequestBody SchedulerJob request) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/updateJob request: {}", JSON.toJSONString(request)); + AssertUtils.assertParamObjectIsNotNull("request", request); + AssertUtils.assertParamObjectIsNotNull("id", request.getId()); + AssertUtils.assertParamObjectIsNotNull("name", request.getName()); + AssertUtils.assertParamObjectIsNotNull("projectId", request.getProjectId()); + } + + @Override + public Boolean action() { + return schedulerService.updateJob(request); + } + }); + } + + @RequestMapping(value = "/job/getById", method = RequestMethod.GET) + @ResponseBody + public HttpResult getJobById(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/getJobById id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public SchedulerJob action() { + return 
schedulerService.getJobById(id); + } + }); + } + + @RequestMapping(value = "/job/search", method = RequestMethod.POST) + @ResponseBody + public HttpResult> searchJobs(@RequestBody SchedulerJobQuery request) { + return HttpBizTemplate.execute2( + new HttpBizCallback>() { + @Override + public void check() { + log.info("/scheduler/searchJobs request: {}", JSON.toJSONString(request)); + } + + @Override + public Paged action() { + return schedulerService.searchJobs(request); + } + }); + } + + @RequestMapping(value = "/instance/getById", method = RequestMethod.GET) + @ResponseBody + public HttpResult getInstanceById(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/getInstanceById id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public SchedulerInstance action() { + return schedulerService.getInstanceById(id); + } + }); + } + + @RequestMapping(value = "/instance/stop", method = RequestMethod.GET) + @ResponseBody + public HttpResult stopInstance(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/stopInstance id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public Boolean action() { + return schedulerService.stopInstance(id); + } + }); + } + + @RequestMapping(value = "/instance/setFinish", method = RequestMethod.GET) + @ResponseBody + public HttpResult setFinishInstance(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/setFinishInstance id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public Boolean action() { + return schedulerService.setFinishInstance(id); + } + }); + } + + @RequestMapping(value = "/instance/restart", method = RequestMethod.GET) + @ResponseBody + public HttpResult restartInstance(Long id) { + return 
HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/restartInstance id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public Boolean action() { + return schedulerService.restartInstance(id); + } + }); + } + + @RequestMapping(value = "/instance/trigger", method = RequestMethod.GET) + @ResponseBody + public HttpResult triggerInstance(Long id) { + return HttpBizTemplate.execute2( + new HttpBizCallback() { + @Override + public void check() { + log.info("/scheduler/triggerInstance id: {}", id); + AssertUtils.assertParamObjectIsNotNull("id", id); + } + + @Override + public Boolean action() { + return schedulerService.triggerInstance(id); + } + }); + } + + @RequestMapping(value = "/instance/search", method = RequestMethod.POST) + @ResponseBody + public HttpResult> searchInstances( + @RequestBody SchedulerInstanceQuery request) { + return HttpBizTemplate.execute2( + new HttpBizCallback>() { + @Override + public void check() { + log.info("/scheduler/searchInstances request: {}", JSON.toJSONString(request)); + } + + @Override + public Paged action() { + return schedulerService.searchInstances(request); + } + }); + } + + @RequestMapping(value = "/task/search", method = RequestMethod.POST) + @ResponseBody + public HttpResult> searchJobs(@RequestBody SchedulerTaskQuery request) { + return HttpBizTemplate.execute2( + new HttpBizCallback>() { + @Override + public void check() { + log.info("/scheduler/searchTasks request: {}", JSON.toJSONString(request)); + } + + @Override + public Paged action() { + return schedulerService.searchTasks(request); + } + }); + } +} diff --git a/server/arks/sofaboot/src/main/resources/config/application-default.properties b/server/arks/sofaboot/src/main/resources/config/application-default.properties index 43bbafe1f..56a8545ea 100644 --- a/server/arks/sofaboot/src/main/resources/config/application-default.properties +++ 
b/server/arks/sofaboot/src/main/resources/config/application-default.properties @@ -18,6 +18,7 @@ spring.application.name=openspg spring.servlet.multipart.max-file-size=100GB spring.servlet.multipart.max-request-size=100GB +spring.main.allow-circular-references=true # system server.port=8887 @@ -52,8 +53,5 @@ dialog.upload.dir=/upload # Scheduler scheduler.handler.type=local scheduler.metadata.store.type=local -scheduler.execute.instances.period=5 -scheduler.execute.instances.unit=MINUTES -scheduler.generate.instances.period=1 -scheduler.generate.instances.unit=HOURS +scheduler.handler.process.period=300 scheduler.execute.max.day=10 diff --git a/server/biz/common/pom.xml b/server/biz/common/pom.xml index 6148a9747..534d81e11 100644 --- a/server/biz/common/pom.xml +++ b/server/biz/common/pom.xml @@ -39,5 +39,13 @@ com.alibaba pemja + + com.antgroup.openspgapp + common-util + + + com.antgroup.openspg.server + core-schema-service + diff --git a/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/AccountManager.java b/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/AccountManager.java new file mode 100644 index 000000000..023b861c5 --- /dev/null +++ b/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/AccountManager.java @@ -0,0 +1,111 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.biz.common; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.account.Account; +import java.util.Collection; +import java.util.List; + +/** account manager */ +public interface AccountManager { + + /** + * create a new account + * + * @param account + * @return + */ + Integer create(Account account); + + /** + * get account info by userNo + * + * @param userNo + * @return + */ + Account getByUserNo(String userNo); + + /** + * get account info by userNo with private info + * + * @param userNo + * @return + */ + Account getWithPrivateByUserNo(String userNo); + + /** + * get account by part of userNO or nickName or realName + * + * @param keyword + * @return + */ + List query(String keyword); + + /** + * get account list + * + * @param loginAccount + * @param page + * @param size + * @return + */ + Paged getAccountList(String loginAccount, Integer page, Integer size); + + /** + * batch get simple user by userNo list + * + * @param userNos + * @return + */ + List getSimpleAccountByUserNoList(Collection userNos); + + /** + * update password + * + * @param account + * @return + */ + Integer updatePassword(Account account); + + /** + * delete account + * + * @param workNo + * @return + */ + Integer deleteAccount(String workNo); + + /** + * get sha256Hex password + * + * @param password + * @param salt + * @return + */ + String getSha256HexPassword(String password, String salt); + + String createSalt(); + + String createToken(String str); + + /** + * update user config + * + * @param userNo + * @param config + * @return + */ + int updateUserConfig(String userNo, String config); +} diff --git a/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/ConfigManager.java b/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/ConfigManager.java new file mode 100644 index 000000000..01050e761 --- /dev/null +++ 
b/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/ConfigManager.java @@ -0,0 +1,92 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.biz.common; + +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.server.api.facade.dto.common.request.ConfigRequest; +import com.antgroup.openspg.server.common.model.config.Config; + +/** kag global configuration */ +public interface ConfigManager { + + /** + * get a config + * + * @param configId + * @param version + * @return + */ + Config query(String configId, String version); + + /** + * get a config by id + * + * @param id + * @return + */ + Config getById(Long id); + + /** + * create global config + * + * @param request + * @return + */ + Integer create(ConfigRequest request); + + /** + * update global config + * + * @param request + * @return + */ + Integer update(ConfigRequest request); + + /** + * set api key desensitization + * + * @param configStr + * @return + */ + String setApiKeyDesensitization(String configStr); + + /** + * handle api key desensitization + * + * @param config + * @param oldConfig + */ + void handleApiKey(JSONObject config, String oldConfig); + + /** + * generate llm id + * + * @param config + */ + void generateLLMIdCompletionLLM(JSONObject config); + + /** + * backward compatible + * + * @param config + */ + void backwardCompatible(JSONObject config); + + /** + * clear redundant field + * + * @param jsonObject model json config + */ + JSONObject clearRedundantField(JSONObject 
jsonObject, String configType); +} diff --git a/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/PermissionManager.java b/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/PermissionManager.java new file mode 100644 index 000000000..f83ac088d --- /dev/null +++ b/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/PermissionManager.java @@ -0,0 +1,114 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.biz.common; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.api.facade.dto.common.request.PermissionRequest; +import com.antgroup.openspg.server.common.model.permission.Permission; +import java.util.List; + +/** permission manager */ +public interface PermissionManager { + + /** + * create permission of some resource + * + * @param request + * @return + */ + Integer create(PermissionRequest request); + + /** + * update permission of some resource + * + * @param request + * @return + */ + Integer update(PermissionRequest request); + + /** + * query page permission or query by condition + * + * @param userNo + * @param roleType + * @param resourceId + * @param resourceTag + * @param page + * @param size + * @return + */ + Paged query( + String userNo, + String roleType, + Long resourceId, + String resourceTag, + Integer page, + Integer size); + + /** + * remove permission + * + * @param request + * @return + */ + Integer removePermission(PermissionRequest request); + + /** + * get user 
has permission permission + * + * @param resourceIds + * @param userNo + * @param roleType + * @param resourceTag + * @return + */ + List getPermissionByUserRolesAndId( + List resourceIds, String userNo, String roleType, String resourceTag); + + /** + * get by userNo and resourceTag + * + * @param userNo + * @param resourceTag + * @return + */ + List getPermissionByUserNoAndResourceTag(String userNo, String resourceTag); + + boolean isSuper(String userNo); + + /** + * is project role + * + * @param userNo + * @param projectId + * @return + */ + boolean isProjectRole(String userNo, Long projectId); + + /** + * get owner user name by project id + * + * @param projectId + * @return + */ + List getOwnerUserNameByProjectId(Long projectId); + + /** + * get by id + * + * @param id + * @return + */ + Permission selectByPrimaryKey(Long id); +} diff --git a/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/ProjectManager.java b/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/ProjectManager.java index 5ab625589..0945c5a1b 100644 --- a/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/ProjectManager.java +++ b/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/ProjectManager.java @@ -48,4 +48,12 @@ public interface ProjectManager { * @return SearchEngine url */ String getSearchEngineUrl(Long projectId); + + /** + * query project by namespace + * + * @param namespace + * @return + */ + Project queryByNamespace(String namespace); } diff --git a/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/impl/AccountManagerImpl.java b/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/impl/AccountManagerImpl.java new file mode 100644 index 000000000..0db1e8ed2 --- /dev/null +++ b/server/biz/common/src/main/java/com/antgroup/openspg/server/biz/common/impl/AccountManagerImpl.java @@ -0,0 +1,164 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the 
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */

package com.antgroup.openspg.server.biz.common.impl;

import com.antgroup.openspg.common.util.StringUtils;
import com.antgroup.openspg.server.api.facade.Paged;
import com.antgroup.openspg.server.biz.common.AccountManager;
import com.antgroup.openspg.server.biz.common.PermissionManager;
import com.antgroup.openspg.server.common.model.account.Account;
import com.antgroup.openspg.server.common.model.permission.Permission;
import com.antgroup.openspg.server.common.service.account.AccountRepository;
import com.antgroup.openspgapp.common.util.enums.PermissionEnum;
import com.antgroup.openspgapp.common.util.enums.ResourceTagEnum;
import com.google.common.collect.Lists;
import java.security.SecureRandom;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/** Default {@link AccountManager} implementation backed by {@link AccountRepository}. */
@Service
public class AccountManagerImpl implements AccountManager {

  private static final Logger LOGGER = LoggerFactory.getLogger(AccountManagerImpl.class);

  /** Alphabet used for salt generation. */
  private static final String SALT_CHARS =
      "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";

  @Autowired private AccountRepository accountRepository;

  @Autowired private PermissionManager permissionManager;

  /** Create a new account; rejects null accounts and blank workNo with a 0 row count. */
  @Override
  public Integer create(Account account) {
    if (account == null || StringUtils.isBlank(account.getWorkNo())) {
      return 0;
    }
    return accountRepository.insert(account);
  }

  /** Get account info by userNo, without private fields. */
  @Override
  public Account getByUserNo(String userNo) {
    return accountRepository.selectByUserNo(userNo);
  }

  /** Get account info by userNo, including private fields (password hash, salt). */
  @Override
  public Account getWithPrivateByUserNo(String userNo) {
    return accountRepository.selectWithPrivateByUserNo(userNo);
  }

  /** Fuzzy-search accounts by part of the userNo, nickName or realName. */
  @Override
  public List<Account> query(String keyword) {
    return accountRepository.query(keyword);
  }

  /**
   * Page through accounts, decorating each with the platform role names held by that user.
   * Role names come from the platform-level permission table (resourceId 0).
   */
  @Override
  public Paged<Account> getAccountList(String loginAccount, Integer page, Integer size) {
    List<Permission> superUserList =
        permissionManager.getPermissionByUserRolesAndId(
            Lists.newArrayList(0L), null, null, ResourceTagEnum.PLATFORM.name());
    Map<String, List<String>> userRoleNamesMap = new HashMap<>();
    superUserList.forEach(
        permission ->
            userRoleNamesMap
                .computeIfAbsent(permission.getUserNo(), k -> Lists.newArrayList())
                .add(PermissionEnum.getRoleTypeById(permission.getRoleId()).name()));
    Paged<Account> accountPaged = accountRepository.getAccountList(loginAccount, page, size);
    if (accountPaged != null && CollectionUtils.isNotEmpty(accountPaged.getResults())) {
      accountPaged
          .getResults()
          .forEach(account -> account.setRoleNames(userRoleNamesMap.get(account.getWorkNo())));
    }
    return accountPaged;
  }

  /** Batch-get lightweight account records by userNo list. */
  @Override
  public List<Account> getSimpleAccountByUserNoList(Collection<String> userNos) {
    return accountRepository.getSimpleAccountByUserNoList(userNos);
  }

  /**
   * Update a user's password, re-hashing with the user's stored salt.
   *
   * <p>Returns 0 on invalid input or when the user does not exist (previously an NPE on a
   * missing user).
   */
  @Override
  public Integer updatePassword(Account account) {
    if (account == null
        || StringUtils.isBlank(account.getWorkNo())
        || StringUtils.isBlank(account.getPassword())) {
      return 0;
    }
    Account oldAccount = accountRepository.selectWithPrivateByUserNo(account.getWorkNo());
    if (oldAccount == null) {
      // FIX: previously dereferenced oldAccount.getSalt() unconditionally -> NPE for unknown user
      return 0;
    }
    Account record = new Account();
    record.setWorkNo(account.getWorkNo());
    record.setPassword(getSha256HexPassword(account.getPassword(), oldAccount.getSalt()));
    return accountRepository.updateByUserNo(record);
  }

  /** Delete an account by userNo; blank input is a no-op. */
  @Override
  public Integer deleteAccount(String userNo) {
    if (StringUtils.isBlank(userNo)) {
      return 0;
    }
    return accountRepository.deleteByUserNo(userNo);
  }

  /** sha256Hex(password + salt). */
  @Override
  public String getSha256HexPassword(String password, String salt) {
    return DigestUtils.sha256Hex(password + salt);
  }

  /**
   * Create a 5-character random alphanumeric salt.
   *
   * <p>Fixes two defects of the original: {@code nextInt(length - 1)} could never pick the last
   * character of the alphabet, and {@code java.util.Random} is not suitable for security-relevant
   * salts — use {@link SecureRandom}.
   *
   * @return the generated salt string
   */
  @Override
  public String createSalt() {
    SecureRandom random = new SecureRandom();
    StringBuilder salt = new StringBuilder(5);
    for (int i = 0; i < 5; i++) {
      salt.append(SALT_CHARS.charAt(random.nextInt(SALT_CHARS.length())));
    }
    return salt.toString();
  }

  /**
   * Create a 16-character token from the middle of the md5 of {@code str}, randomly upper-casing
   * letter characters. Non-cryptographic: used only to produce a short display token.
   *
   * @param str input string
   * @return the derived token, or an empty string when derivation fails
   */
  @Override
  public String createToken(String str) {
    Locale defloc = Locale.getDefault();
    StringBuilder token = new StringBuilder(16);
    try {
      String token32 = DigestUtils.md5Hex(str);
      String token16 = token32.substring(8, 24);
      for (int i = 0; i < token16.length(); i++) {
        String tokenIndex = token16.substring(i, i + 1);
        // hex digits > '9' are letters a-f; randomly upper-case roughly half of them
        if (tokenIndex.compareTo("9") > 0 && Math.random() > 0.5) {
          tokenIndex = tokenIndex.toUpperCase(defloc);
        }
        token.append(tokenIndex);
      }
    } catch (Exception e) {
      LOGGER.warn("create token error: str=" + str, e);
    }
    return token.toString();
  }

  /** Persist the per-user configuration blob. */
  @Override
  public int updateUserConfig(String userNo, String config) {
    return accountRepository.updateUserConfig(userNo, config);
  }
}
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */

package com.antgroup.openspg.server.biz.common.impl;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.antgroup.openspg.common.util.StringUtils;
import com.antgroup.openspg.server.api.facade.dto.common.request.ConfigRequest;
import com.antgroup.openspg.server.biz.common.ConfigManager;
import com.antgroup.openspg.server.common.model.config.Config;
import com.antgroup.openspg.server.common.service.config.ConfigRepository;
import com.antgroup.openspgapp.common.util.enums.ModelType;
import com.antgroup.openspgapp.common.util.utils.SpgAppConstant;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.collections4.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * Default {@link ConfigManager} implementation: CRUD for the global KAG configuration plus
 * api-key masking/restoration and 0.5 → 0.6 config-layout migration.
 */
@Service
public class ConfigManagerImpl implements ConfigManager {

  private static final Logger LOGGER = LoggerFactory.getLogger(ConfigManagerImpl.class);

  @Autowired private ConfigRepository configRepository;

  /** Get a config by logical id and version. */
  @Override
  public Config query(String configId, String version) {
    return configRepository.query(configId, version);
  }

  /** Get a config by primary key. */
  @Override
  public Config getById(Long id) {
    return configRepository.getById(id);
  }

  /** Create a global config. */
  @Override
  public Integer create(ConfigRequest request) {
    Config config = toModel(request);
    return configRepository.save(config);
  }

  /** Update an existing config; returns 0 when the id does not exist. */
  @Override
  public Integer update(ConfigRequest request) {
    Config oldConfig = getById(request.getId());
    if (oldConfig == null) {
      return 0;
    }
    Config config = toModel(request);
    return configRepository.update(config);
  }

  /**
   * Replace api keys and the graph-store password in the serialized config with the display
   * placeholder so secrets never leave the server.
   */
  @Override
  public String setApiKeyDesensitization(String configStr) {
    if (StringUtils.isBlank(configStr)) {
      return configStr;
    }
    JSONObject config = JSON.parseObject(configStr);
    JSONObject vectorizerJson = config.getJSONObject(SpgAppConstant.VECTORIZER);
    if (vectorizerJson != null && vectorizerJson.containsKey(SpgAppConstant.API_KEY)) {
      vectorizerJson.put(SpgAppConstant.API_KEY, SpgAppConstant.DEFAULT_VECTORIZER_API_KEY);
      config.put(SpgAppConstant.VECTORIZER, vectorizerJson);
    }
    JSONArray llmArray = config.getJSONArray(SpgAppConstant.LLM_SELECT);
    if (CollectionUtils.isNotEmpty(llmArray)) {
      for (int i = 0; i < llmArray.size(); i++) {
        JSONObject llm = llmArray.getJSONObject(i);
        if (llm == null) {
          continue;
        }
        if (llm.containsKey(SpgAppConstant.API_KEY)) {
          llm.put(SpgAppConstant.API_KEY, SpgAppConstant.DEFAULT_VECTORIZER_API_KEY);
        }
        if (llm.containsKey(SpgAppConstant.KEY)) {
          llm.put(SpgAppConstant.KEY, SpgAppConstant.DEFAULT_VECTORIZER_API_KEY);
        }
      }
      config.put(SpgAppConstant.LLM_SELECT, llmArray);
    }
    JSONObject llm = config.getJSONObject(SpgAppConstant.LLM);
    if (llm != null) {
      if (llm.containsKey(SpgAppConstant.API_KEY)) {
        llm.put(SpgAppConstant.API_KEY, SpgAppConstant.DEFAULT_VECTORIZER_API_KEY);
      }
      if (llm.containsKey(SpgAppConstant.KEY)) {
        llm.put(SpgAppConstant.KEY, SpgAppConstant.DEFAULT_VECTORIZER_API_KEY);
      }
    }
    JSONObject graphStore = config.getJSONObject(SpgAppConstant.GRAPH_STORE);
    if (graphStore != null) {
      graphStore.put(SpgAppConstant.PASSWORD, SpgAppConstant.DEFAULT_VECTORIZER_API_KEY);
      config.put(SpgAppConstant.GRAPH_STORE, graphStore);
    }
    return config.toJSONString();
  }

  /**
   * Restore real secrets into {@code config} wherever the client echoed back the placeholder,
   * copying them from the previously stored config.
   *
   * <p>FIX: the old config's vectorizer / graph_store sections are now null-guarded before
   * dereferencing (previously an unconditional {@code getJSONObject(...).getString(...)} threw an
   * NPE when the stored config lacked the section).
   */
  @Override
  public void handleApiKey(JSONObject config, String oldConfig) {
    if (config == null || StringUtils.isBlank(oldConfig)) {
      return;
    }
    JSONObject oldConfigJson = JSON.parseObject(oldConfig);
    JSONObject vectorizer = config.getJSONObject(SpgAppConstant.VECTORIZER);
    if (vectorizer != null) {
      String apiKey = vectorizer.getString(SpgAppConstant.API_KEY);
      if (vectorizer.containsKey(SpgAppConstant.API_KEY)
          && StringUtils.equals(apiKey, SpgAppConstant.DEFAULT_VECTORIZER_API_KEY)) {
        JSONObject oldVectorizer = oldConfigJson.getJSONObject(SpgAppConstant.VECTORIZER);
        if (oldVectorizer != null) {
          vectorizer.put(SpgAppConstant.API_KEY, oldVectorizer.getString(SpgAppConstant.API_KEY));
          config.put(SpgAppConstant.VECTORIZER, vectorizer);
        }
      }
    }
    // index the old selectable-LLM entries by llm_id for placeholder restoration
    JSONArray oldLlmArray = oldConfigJson.getJSONArray(SpgAppConstant.LLM_SELECT);
    Map<String, JSONObject> oldLlmMap = new HashMap<>();
    if (CollectionUtils.isNotEmpty(oldLlmArray)) {
      for (int i = 0; i < oldLlmArray.size(); i++) {
        JSONObject oldLlm = oldLlmArray.getJSONObject(i);
        if (oldLlm == null) {
          continue;
        }
        oldLlmMap.put(oldLlm.getString(SpgAppConstant.LLM_ID), oldLlm);
      }
    }

    JSONArray llmArray = config.getJSONArray(SpgAppConstant.LLM_SELECT);
    if (CollectionUtils.isNotEmpty(llmArray)) {
      for (int i = 0; i < llmArray.size(); i++) {
        JSONObject llm = llmArray.getJSONObject(i);
        if (llm == null) {
          continue;
        }
        JSONObject oldLlm = oldLlmMap.get(llm.getString(SpgAppConstant.LLM_ID));
        if (null == oldLlm) {
          // fall back to the old single-LLM section when type+model match
          JSONObject oldLlmJson = oldConfigJson.getJSONObject(SpgAppConstant.LLM);
          if (oldLlmJson != null
              && StringUtils.equals(
                  oldLlmJson.getString(SpgAppConstant.TYPE), llm.getString(SpgAppConstant.TYPE))
              && StringUtils.equals(
                  oldLlmJson.getString(SpgAppConstant.MODEL),
                  llm.getString(SpgAppConstant.MODEL))) {
            oldLlm = oldLlmJson;
          }
        }
        if (oldLlm == null) {
          continue;
        }
        if (llm.containsKey(SpgAppConstant.API_KEY)
            && StringUtils.equals(
                llm.getString(SpgAppConstant.API_KEY), SpgAppConstant.DEFAULT_VECTORIZER_API_KEY)) {
          llm.put(SpgAppConstant.API_KEY, oldLlm.getString(SpgAppConstant.API_KEY));
        }
        if (llm.containsKey(SpgAppConstant.KEY)
            && StringUtils.equals(
                llm.getString(SpgAppConstant.KEY), SpgAppConstant.DEFAULT_VECTORIZER_API_KEY)) {
          llm.put(SpgAppConstant.KEY, oldLlm.getString(SpgAppConstant.KEY));
        }
      }
      config.put(SpgAppConstant.LLM_SELECT, llmArray);
    }

    JSONObject llm = config.getJSONObject(SpgAppConstant.LLM);
    if (llm != null) {
      JSONObject oldLlm = oldConfigJson.getJSONObject(SpgAppConstant.LLM);
      if (oldLlm == null) {
        oldLlm = oldLlmMap.get(llm.getString(SpgAppConstant.LLM_ID));
      }
      if (oldLlm != null) {
        if (llm.containsKey(SpgAppConstant.API_KEY)
            && StringUtils.equals(
                llm.getString(SpgAppConstant.API_KEY), SpgAppConstant.DEFAULT_VECTORIZER_API_KEY)) {
          llm.put(SpgAppConstant.API_KEY, oldLlm.getString(SpgAppConstant.API_KEY));
        }
        if (llm.containsKey(SpgAppConstant.KEY)
            && StringUtils.equals(
                llm.getString(SpgAppConstant.KEY), SpgAppConstant.DEFAULT_VECTORIZER_API_KEY)) {
          llm.put(SpgAppConstant.KEY, oldLlm.getString(SpgAppConstant.KEY));
        }
      }
    }

    JSONObject graphStore = config.getJSONObject(SpgAppConstant.GRAPH_STORE);
    if (graphStore != null) {
      String password = graphStore.getString(SpgAppConstant.PASSWORD);
      if (StringUtils.equals(password, SpgAppConstant.DEFAULT_VECTORIZER_API_KEY)) {
        JSONObject oldGraphStore = oldConfigJson.getJSONObject(SpgAppConstant.GRAPH_STORE);
        if (oldGraphStore != null) {
          graphStore.put(SpgAppConstant.PASSWORD, oldGraphStore.getString(SpgAppConstant.PASSWORD));
          config.put(SpgAppConstant.GRAPH_STORE, graphStore);
        }
      }
    }
  }

  /**
   * Assign an llm_id to each selectable LLM entry that lacks one, and promote the entry flagged
   * as default to the top-level llm section (trimmed to known keys).
   */
  @Override
  public void generateLLMIdCompletionLLM(JSONObject config) {
    if (config == null) {
      return;
    }
    JSONArray llmArray = config.getJSONArray(SpgAppConstant.LLM_SELECT);
    if (llmArray != null) {
      for (int i = 0; i < llmArray.size(); i++) {
        JSONObject llm = llmArray.getJSONObject(i);
        if (llm == null) {
          continue;
        }
        if (llm.getBooleanValue(SpgAppConstant.DEFAULT)) {
          config.put(SpgAppConstant.LLM, clearRedundantField(llm, SpgAppConstant.LLM));
        } else {
          // NOTE(review): removes a nested "llm" key from the entry itself — presumably cleanup
          // of a legacy layout; confirm intent.
          llm.remove(SpgAppConstant.LLM);
        }
        if (StringUtils.isBlank(llm.getString(SpgAppConstant.LLM_ID))) {
          llm.put(SpgAppConstant.LLM_ID, UUID.randomUUID().toString());
        }
      }
      config.put(SpgAppConstant.LLM_SELECT, llmArray);
    }
  }

  /** Migrate a 0.5-layout config (vectorizer / llm / llm_select sections) to the 0.6 layout. */
  @Override
  public void backwardCompatible(JSONObject config) {
    if (config == null) {
      return;
    }
    // vectorizer 0.5 -> 0.6
    JSONObject vectorizerJson = config.getJSONObject(SpgAppConstant.VECTORIZER);
    if (vectorizerJson != null) {
      if (vectorizerJson.containsKey(SpgAppConstant.VECTORIZER)) {
        vectorizerJson.remove(SpgAppConstant.VECTORIZER);
      }
      if (!vectorizerJson.containsKey(SpgAppConstant.TYPE)) {
        vectorizerJson.put(SpgAppConstant.TYPE, SpgAppConstant.OPENAI);
      }
      String model = vectorizerJson.getString(SpgAppConstant.MODEL);
      if (StringUtils.equals(model, "bge-m3")) {
        vectorizerJson.put(SpgAppConstant.MODEL, "BAAI/bge-m3");
      }
      config.put(SpgAppConstant.VECTORIZER, vectorizerJson);
    }

    // llm 0.5 -> 0.6
    JSONObject llmJson = config.getJSONObject(SpgAppConstant.LLM);
    if (llmJson != null) {
      backwardCompatibleLLM(llmJson);
      config.put(SpgAppConstant.LLM, llmJson);
    }

    // llm_select 0.5 -> 0.6; when absent, synthesize it from the single llm section
    JSONArray llmSelectJson = config.getJSONArray(SpgAppConstant.LLM_SELECT);
    if (CollectionUtils.isNotEmpty(llmSelectJson)) {
      for (int i = 0; i < llmSelectJson.size(); i++) {
        JSONObject llm = llmSelectJson.getJSONObject(i);
        backwardCompatibleLLM(llm);
      }
      config.put(SpgAppConstant.LLM_SELECT, llmSelectJson);
    } else if (CollectionUtils.isEmpty(llmSelectJson) && llmJson != null) {
      llmSelectJson = new JSONArray();
      JSONObject llm = JSON.parseObject(llmJson.toJSONString());
      backwardCompatibleLLM(llm);
      llm.put(SpgAppConstant.LLM_ID, UUID.randomUUID().toString());
      llm.put(SpgAppConstant.DEFAULT, true);
      llm.put(SpgAppConstant.DESC, "");
      llmSelectJson.add(llm);
      config.put(SpgAppConstant.LLM_SELECT, llmSelectJson);
    }
  }

  /**
   * Return a copy of {@code jsonObject} trimmed to the known key set of the given section
   * ("llm" or "vectorizer"); any other section name yields an empty object.
   */
  @Override
  public JSONObject clearRedundantField(JSONObject jsonObject, String configType) {
    if (jsonObject == null) {
      return new JSONObject();
    }
    List<String> llmKeyList;
    if (StringUtils.equals(configType, SpgAppConstant.LLM)) {
      llmKeyList =
          Arrays.asList(
              SpgAppConstant.TYPE,
              SpgAppConstant.BASE_URL,
              SpgAppConstant.API_KEY,
              SpgAppConstant.MODEL,
              SpgAppConstant.TEMPERATURE,
              SpgAppConstant.STREAM);
    } else if (StringUtils.equals(configType, SpgAppConstant.VECTORIZER)) {
      llmKeyList =
          Arrays.asList(
              SpgAppConstant.TYPE,
              SpgAppConstant.MODEL,
              SpgAppConstant.BASE_URL,
              SpgAppConstant.API_KEY,
              SpgAppConstant.VECTOR_DIMENSIONS);
    } else {
      return new JSONObject();
    }
    // deep copy so the caller's object is not mutated
    JSONObject llmHandle = JSON.parseObject(jsonObject.toString());
    if (llmHandle == null) {
      return new JSONObject();
    }
    Iterator<String> keys = jsonObject.keySet().iterator();
    while (keys.hasNext()) {
      String key = keys.next();
      if (!llmKeyList.contains(key)) {
        llmHandle.remove(key);
      }
    }
    return llmHandle;
  }

  /** Normalize a single 0.5 LLM entry in place (client_type -> type, defaults per model type). */
  private static void backwardCompatibleLLM(JSONObject llmJson) {
    if (llmJson == null) {
      return;
    }
    String clientType = llmJson.getString(SpgAppConstant.CLIENT_TYPE);
    ModelType modelType = ModelType.getByCode(clientType);
    if (modelType == null) {
      modelType = ModelType.getByCode(llmJson.getString(SpgAppConstant.TYPE));
    }
    if (modelType == null) {
      return;
    }
    switch (modelType) {
      case MAAS:
        if (llmJson.containsKey(SpgAppConstant.CLIENT_TYPE)) {
          llmJson.remove(SpgAppConstant.CLIENT_TYPE);
        }
        if (!llmJson.containsKey(SpgAppConstant.TYPE)) {
          llmJson.put(SpgAppConstant.TYPE, clientType);
        }
        if (!llmJson.containsKey(SpgAppConstant.TEMPERATURE)) {
          llmJson.put(SpgAppConstant.TEMPERATURE, 0.7);
        }
        if (!llmJson.containsKey(SpgAppConstant.STREAM)) {
          llmJson.put(SpgAppConstant.STREAM, "False");
        }
        break;
      case OLLAMA:
      case VLLM:
        if (llmJson.containsKey(SpgAppConstant.CLIENT_TYPE)) {
          llmJson.remove(SpgAppConstant.CLIENT_TYPE);
        }
        if (!llmJson.containsKey(SpgAppConstant.TYPE)) {
          llmJson.put(SpgAppConstant.TYPE, clientType);
        }
        break;
      default:
        LOGGER.info("not support model type: {}", clientType);
        break;
    }
  }

  /** Map a request onto the Config model; global config is owned by project 0 / admin. */
  private Config toModel(ConfigRequest request) {
    Config config = new Config();
    config.setProjectId("0");
    config.setUserNo("admin");
    config.setId(request.getId());
    config.setConfigName(request.getConfigName());
    config.setConfigId(request.getConfigId());
    config.setVersion(request.getVersion());
    config.setConfig(request.getConfig());
    return config;
  }
}
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */

package com.antgroup.openspg.server.biz.common.impl;

import com.antgroup.openspg.common.util.StringUtils;
import com.antgroup.openspg.server.api.facade.Paged;
import com.antgroup.openspg.server.api.facade.dto.common.request.PermissionRequest;
import com.antgroup.openspg.server.biz.common.AccountManager;
import com.antgroup.openspg.server.biz.common.PermissionManager;
import com.antgroup.openspg.server.common.model.account.Account;
import com.antgroup.openspg.server.common.model.permission.Permission;
import com.antgroup.openspg.server.common.model.project.AccountRoleInfo;
import com.antgroup.openspg.server.common.service.permission.PermissionRepository;
import com.antgroup.openspgapp.common.util.enums.PermissionEnum;
import com.antgroup.openspgapp.common.util.enums.ResourceTagEnum;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/** Default {@link PermissionManager} implementation backed by {@link PermissionRepository}. */
@Service
public class PermissionManagerImpl implements PermissionManager {

  @Autowired private PermissionRepository permissionRepository;

  @Autowired private AccountManager accountManager;

  /**
   * Create permissions for the users/resources in the request; users that already hold a grant
   * on any of the requested resources are updated instead of inserted.
   */
  @Override
  public Integer create(PermissionRequest request) {
    List<Permission> permissionList = toModels(request);
    if (CollectionUtils.isEmpty(permissionList)) {
      return 0;
    }
    Set<String> existingUserNos =
        permissionRepository
            .selectByResourceIdsAndResourceTag(request.getResourceIds(), request.getResourceTag())
            .stream()
            .map(Permission::getUserNo)
            .collect(Collectors.toSet());
    int count = 0;
    for (Permission permission : permissionList) {
      if (existingUserNos.contains(permission.getUserNo())) {
        count += permissionRepository.update(permission);
      } else {
        count += permissionRepository.save(permission);
      }
    }
    return count;
  }

  /** Update permissions; when the request carries an id only that single row is updated. */
  @Override
  public Integer update(PermissionRequest request) {
    int count = 0;
    List<Permission> permissionList = toModels(request);
    if (CollectionUtils.isEmpty(permissionList)) {
      return 0;
    }
    if (null != request.getId()) {
      count = permissionRepository.update(permissionList.get(0));
    } else {
      for (Permission permission : permissionList) {
        count += permissionRepository.update(permission);
      }
    }
    return count;
  }

  /**
   * Page-query permissions, decorating each row with the holder's display name and role info.
   * Returns an empty page when resourceTag or resourceId is missing.
   */
  @Override
  public Paged<Permission> query(
      String userNo,
      String roleType,
      Long resourceId,
      String resourceTag,
      Integer page,
      Integer size) {
    if (StringUtils.isBlank(resourceTag) || null == resourceId) {
      return new Paged<>();
    }
    // resolve the role-type name to its role id
    Long roleId = null;
    if (StringUtils.isNotBlank(roleType)) {
      roleId = PermissionEnum.valueOf(roleType).getId();
    }
    Paged<Permission> permissionPage =
        permissionRepository.queryPage(userNo, roleId, resourceId, resourceTag, page, size);
    if (permissionPage.getTotal() > 0 && CollectionUtils.isNotEmpty(permissionPage.getResults())) {
      List<Permission> permissionList = permissionPage.getResults();
      Set<String> userNos =
          permissionList.stream().map(Permission::getUserNo).collect(Collectors.toSet());
      Map<String, Account> userNameMap =
          accountManager.getSimpleAccountByUserNoList(userNos).stream()
              .collect(Collectors.toMap(Account::getWorkNo, account -> account));
      permissionList.forEach(
          permission -> {
            Account account = userNameMap.get(permission.getUserNo());
            if (null != account) {
              permission.setUserName(
                  StringUtils.isNotBlank(account.getNickName())
                      ? account.getNickName()
                      : account.getWorkNo());
            }
            permission.setAccountRoleInfo(
                new AccountRoleInfo(PermissionEnum.getRoleTypeById(permission.getRoleId()).name()));
          });
    }
    return permissionPage;
  }

  /** Remove permissions; when the request carries an id only that single row is deleted. */
  @Override
  public Integer removePermission(PermissionRequest request) {
    int count = 0;
    if (null != request.getId()) {
      count = permissionRepository.delete(new Permission(request.getId()));
    } else {
      List<Permission> permissionList = toModels(request);
      for (Permission permission : permissionList) {
        count += permissionRepository.delete(permission);
      }
    }
    return count;
  }

  /** Delegate: permissions a user holds on the given resources. */
  @Override
  public List<Permission> getPermissionByUserRolesAndId(
      List<Long> resourceIds, String userNo, String roleType, String resourceTag) {
    return permissionRepository.getPermissionByUserRolesAndId(
        resourceIds, userNo, roleType, resourceTag);
  }

  /** Delegate: all permissions of a user on resources with the given tag. */
  @Override
  public List<Permission> getPermissionByUserNoAndResourceTag(String userNo, String resourceTag) {
    return permissionRepository.getPermissionByUserNoAndResourceTag(userNo, resourceTag);
  }

  /** True when the user holds the platform SUPER role (platform resource id 0). */
  @Override
  public boolean isSuper(String userNo) {
    List<Permission> superPermissions =
        getPermissionByUserRolesAndId(
            Lists.newArrayList(0L),
            userNo,
            PermissionEnum.SUPER.name(),
            ResourceTagEnum.PLATFORM.name());
    return CollectionUtils.isNotEmpty(superPermissions);
  }

  /** True when the user holds any role on the given project. */
  @Override
  public boolean isProjectRole(String userNo, Long projectId) {
    List<Permission> permissionList =
        getPermissionByUserRolesAndId(
            Lists.newArrayList(projectId), null, null, ResourceTagEnum.PROJECT.name());
    if (CollectionUtils.isEmpty(permissionList)) {
      return false;
    }
    return permissionList.stream()
        .anyMatch(permission -> StringUtils.equals(permission.getUserNo(), userNo));
  }

  /**
   * Get the display names of the project's OWNER users; returns a placeholder message when the
   * project has no owner.
   */
  @Override
  public List<String> getOwnerUserNameByProjectId(Long projectId) {
    String defaultOwner = "there is no resource administrator for the project";
    List<Permission> permissionList =
        getPermissionByUserRolesAndId(
            Lists.newArrayList(projectId),
            null,
            PermissionEnum.OWNER.name(),
            ResourceTagEnum.PROJECT.name());
    List<String> userNoList =
        permissionList.stream()
            .filter(permission -> permission.getRoleId().equals(PermissionEnum.OWNER.getId()))
            .map(Permission::getUserNo)
            .collect(Collectors.toList());
    if (CollectionUtils.isEmpty(userNoList)) {
      return Lists.newArrayList(defaultOwner);
    }
    List<String> userName =
        accountManager.getSimpleAccountByUserNoList(userNoList).stream()
            .map(
                account ->
                    StringUtils.isNotBlank(account.getNickName())
                        ? account.getNickName()
                        : account.getWorkNo())
            .collect(Collectors.toList());
    if (CollectionUtils.isEmpty(userName)) {
      return Lists.newArrayList(defaultOwner);
    }
    return userName;
  }

  /** Get a permission by primary key. */
  @Override
  public Permission selectByPrimaryKey(Long id);

  /** Get a permission by primary key. */
  @Override
  public Permission selectByPrimaryKey(Long id) {
    return permissionRepository.selectByPrimaryKey(id);
  }

  /**
   * Expand the request into one Permission per (userNo, resourceId) pair.
   *
   * <p>FIX: the original reused a single {@code Permission} instance across the inner
   * resourceIds loop, so every list entry for a user aliased the same object and all of them
   * ended up with the LAST resourceId. A fresh instance is now allocated per pair.
   */
  private List<Permission> toModels(PermissionRequest request) {
    Long roleId = null;
    if (StringUtils.equals(request.getResourceTag(), ResourceTagEnum.PROJECT.name())) {
      roleId = PermissionEnum.valueOf(request.getRoleType()).getId();
    }
    List<Permission> permissionList = new ArrayList<>();
    for (String userNo : request.getUserNos()) {
      for (Long resourceId : request.getResourceIds()) {
        Permission permission = new Permission();
        permission.setUserNo(userNo);
        permission.setResourceTag(request.getResourceTag());
        permission.setRoleId(roleId);
        permission.setRoleType(request.getRoleType());
        permission.setResourceId(resourceId);
        permissionList.add(permission);
      }
    }
    return permissionList;
  }
}
com.antgroup.openspg.cloudext.impl.graphstore.neo4j.Neo4jConstants; import com.antgroup.openspg.common.util.StringUtils; import com.antgroup.openspg.common.util.neo4j.Neo4jAdminUtils; +import com.antgroup.openspg.core.schema.model.SPGSchema; +import com.antgroup.openspg.core.schema.model.SPGSchemaAlterCmd; +import com.antgroup.openspg.core.schema.model.identifier.SPGTypeIdentifier; +import com.antgroup.openspg.core.schema.model.type.BaseSPGType; import com.antgroup.openspg.server.api.facade.Paged; import com.antgroup.openspg.server.api.facade.dto.common.request.ProjectCreateRequest; import com.antgroup.openspg.server.api.facade.dto.common.request.ProjectQueryRequest; @@ -25,7 +29,13 @@ import com.antgroup.openspg.server.common.model.project.Project; import com.antgroup.openspg.server.common.service.project.ProjectRepository; import com.antgroup.openspg.server.common.service.project.ProjectService; +import com.antgroup.openspg.server.core.schema.service.alter.sync.BaseSchemaSyncer; +import com.antgroup.openspg.server.core.schema.service.alter.sync.SchemaStorageEnum; +import com.antgroup.openspg.server.core.schema.service.alter.sync.SchemaSyncerFactory; +import com.antgroup.openspg.server.core.schema.service.type.SPGTypeService; import java.util.List; +import java.util.Set; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; @@ -33,10 +43,13 @@ import org.springframework.web.util.UriComponentsBuilder; @Service +@Slf4j public class ProjectManagerImpl implements ProjectManager { @Autowired private ProjectRepository projectRepository; @Autowired private ProjectService projectService; + @Autowired private SchemaSyncerFactory schemaSyncerFactory; + @Autowired private SPGTypeService spgTypeService; @Value("${cloudext.graphstore.url:}") private String url; @@ -44,15 +57,15 @@ public class ProjectManagerImpl implements 
ProjectManager { @Override public Project create(ProjectCreateRequest request) { JSONObject config = setDatabase(request.getConfig(), request.getNamespace()); - setGraphStore(request.getNamespace(), config, true); Project project = new Project( null, request.getName(), - request.getDesc(), + request.getDescription(), request.getNamespace(), request.getTenantId(), config.toJSONString()); + setGraphStore(project, config, true); Long projectId = projectRepository.save(project); project.setId(projectId); return project; @@ -62,10 +75,21 @@ public Project create(ProjectCreateRequest request) { public Project update(ProjectCreateRequest request) { Project project = projectRepository.queryById(request.getId()); JSONObject config = setDatabase(request.getConfig(), project.getNamespace()); - setGraphStore(request.getNamespace(), config, false); + setGraphStore(project, config, true); config = setVectorDimensions(config, project); - Project update = new Project(request.getId(), null, null, null, null, config.toJSONString()); - return projectRepository.update(update); + Project update = + new Project( + request.getId(), + request.getName(), + request.getDescription(), + null, + null, + config.toJSONString()); + update = projectRepository.update(update); + long start = System.currentTimeMillis(); + createSchema(request.getId()); + log.info("createSchema cost {} ms", System.currentTimeMillis() - start); + return update; } private JSONObject setDatabase(String configStr, String namespace) { @@ -120,7 +144,11 @@ public Integer deleteById(Long projectId) { if (project == null) { return 0; } - deleteDatabase(project); + try { + deleteDatabase(project); + } catch (Exception e) { + log.error("delete project database Exception:" + project, e); + } return projectRepository.deleteById(projectId); } @@ -129,16 +157,28 @@ public void deleteDatabase(Project project) { JSONObject config = JSONObject.parseObject(project.getConfig()); UriComponents uriComponents = 
UriComponentsBuilder.fromUriString(url).build(); String database = uriComponents.getQueryParams().getFirst(Neo4jConstants.DATABASE); + String host = + String.format( + "%s://%s:%s", + uriComponents.getScheme(), uriComponents.getHost(), uriComponents.getPort()); + String user = uriComponents.getQueryParams().getFirst(Neo4jConstants.USER); + String password = uriComponents.getQueryParams().getFirst(Neo4jConstants.PASSWORD); JSONObject graphStore = config.getJSONObject(CommonConstants.GRAPH_STORE); - String host = graphStore.getString(Neo4jConstants.URI); - String user = graphStore.getString(Neo4jConstants.USER); - String password = graphStore.getString(Neo4jConstants.PASSWORD); + if (graphStore.containsKey(Neo4jConstants.URI)) { + host = graphStore.getString(Neo4jConstants.URI); + } + if (graphStore.containsKey(Neo4jConstants.USER)) { + user = graphStore.getString(Neo4jConstants.USER); + } + if (graphStore.containsKey(Neo4jConstants.PASSWORD)) { + password = graphStore.getString(Neo4jConstants.PASSWORD); + } String dropDatabase = project.getNamespace().toLowerCase(); Neo4jAdminUtils driver = new Neo4jAdminUtils(host, user, password, database); driver.neo4jGraph.dropDatabase(dropDatabase); } - public void setGraphStore(String namespace, JSONObject config, boolean createDatabase) { + public void setGraphStore(Project project, JSONObject config, boolean createDatabase) { UriComponents uriComponents = UriComponentsBuilder.fromUriString(url).build(); String database = uriComponents.getQueryParams().getFirst(Neo4jConstants.DATABASE); String host = @@ -151,6 +191,8 @@ public void setGraphStore(String namespace, JSONObject config, boolean createDat JSONObject graphStore = config.getJSONObject(CommonConstants.GRAPH_STORE); if (graphStore.containsKey(Neo4jConstants.URI)) { host = graphStore.getString(Neo4jConstants.URI); + } else { + graphStore.put(Neo4jConstants.URI, host); } if (graphStore.containsKey(Neo4jConstants.USER)) { user = graphStore.getString(Neo4jConstants.USER); 
@@ -164,11 +206,26 @@ public void setGraphStore(String namespace, JSONObject config, boolean createDat } if (createDatabase) { Neo4jAdminUtils driver = new Neo4jAdminUtils(host, user, password, database); - String projectDatabase = namespace.toLowerCase(); + String projectDatabase = project.getNamespace().toLowerCase(); driver.neo4jGraph.createDatabase(projectDatabase); } } + public void createSchema(Long projectId) { + try { + BaseSchemaSyncer schemaSyncer = schemaSyncerFactory.getSchemaSyncer(SchemaStorageEnum.GRAPH); + if (schemaSyncer != null) { + Set spreadStdTypeNames = spgTypeService.querySpreadStdTypeName(); + List spgTypes = spgTypeService.queryProjectSchema(projectId).getSpgTypes(); + SPGSchemaAlterCmd schemaEditCmd = + new SPGSchemaAlterCmd(new SPGSchema(spgTypes, spreadStdTypeNames)); + schemaSyncer.syncSchema(projectId, schemaEditCmd); + } + } catch (Exception e) { + log.error("createSchema Exception:" + projectId, e); + } + } + @Override public List query(ProjectQueryRequest request) { return projectRepository.query(request); @@ -189,4 +246,9 @@ public String getSearchEngineUrl(Long projectId) { // For Neo4j, GraphStore and SearchEngine are the same. 
return getGraphStoreUrl(projectId); } + + @Override + public Project queryByNamespace(String namespace) { + return projectRepository.queryByNamespace(namespace); + } } diff --git a/server/biz/schema/src/main/java/com/antgroup/openspg/server/biz/schema/SchemaManager.java b/server/biz/schema/src/main/java/com/antgroup/openspg/server/biz/schema/SchemaManager.java index 512ce9e1c..b56ca38cb 100644 --- a/server/biz/schema/src/main/java/com/antgroup/openspg/server/biz/schema/SchemaManager.java +++ b/server/biz/schema/src/main/java/com/antgroup/openspg/server/biz/schema/SchemaManager.java @@ -14,10 +14,13 @@ package com.antgroup.openspg.server.biz.schema; import com.antgroup.openspg.core.schema.model.predicate.Property; +import com.antgroup.openspg.core.schema.model.predicate.Relation; +import com.antgroup.openspg.core.schema.model.semantic.SPGOntologyEnum; import com.antgroup.openspg.core.schema.model.type.BaseSPGType; import com.antgroup.openspg.core.schema.model.type.ProjectSchema; import com.antgroup.openspg.core.schema.model.type.SPGTypeEnum; import com.antgroup.openspg.server.api.facade.dto.schema.request.SchemaAlterRequest; +import com.antgroup.openspg.server.core.schema.service.predicate.model.SimpleProperty; import java.util.List; /** Provide methods to manager project's schema information. */ @@ -46,6 +49,31 @@ public interface SchemaManager { */ BaseSPGType getSpgType(String uniqueName); + /** + * Query spg type by unique id, it will return null of the type by id not exists. + * + * @param uniqueIds list of unique id + * @return list of spg type detail + */ + List querySPGTypeById(List uniqueIds); + + /** + * Query relation type by spg unique id. + * + * @param uniqueIds list of spg unique id + * @return list of relation type + */ + List queryRelationByUniqueId(List uniqueIds); + + /** + * Query relation type by spg unique id. 
+ * + * @param uniqueIds list of spg unique id + * @param ontologyEnum + * @return list of relation type + */ + List queryPropertyByUniqueId(List uniqueIds, SPGOntologyEnum ontologyEnum); + /** * Get built-in properties of a kind of spg type. * diff --git a/server/biz/schema/src/main/java/com/antgroup/openspg/server/biz/schema/impl/SchemaManagerImpl.java b/server/biz/schema/src/main/java/com/antgroup/openspg/server/biz/schema/impl/SchemaManagerImpl.java index 618139be0..8266adede 100644 --- a/server/biz/schema/src/main/java/com/antgroup/openspg/server/biz/schema/impl/SchemaManagerImpl.java +++ b/server/biz/schema/src/main/java/com/antgroup/openspg/server/biz/schema/impl/SchemaManagerImpl.java @@ -16,6 +16,8 @@ import com.antgroup.openspg.core.schema.model.SchemaException; import com.antgroup.openspg.core.schema.model.identifier.SPGTypeIdentifier; import com.antgroup.openspg.core.schema.model.predicate.Property; +import com.antgroup.openspg.core.schema.model.predicate.Relation; +import com.antgroup.openspg.core.schema.model.semantic.SPGOntologyEnum; import com.antgroup.openspg.core.schema.model.type.BaseSPGType; import com.antgroup.openspg.core.schema.model.type.ProjectSchema; import com.antgroup.openspg.core.schema.model.type.SPGTypeEnum; @@ -29,6 +31,7 @@ import com.antgroup.openspg.server.common.service.project.ProjectService; import com.antgroup.openspg.server.core.schema.service.alter.SchemaAlterPipeline; import com.antgroup.openspg.server.core.schema.service.alter.model.SchemaAlterContext; +import com.antgroup.openspg.server.core.schema.service.predicate.model.SimpleProperty; import com.antgroup.openspg.server.core.schema.service.type.SPGTypeService; import com.antgroup.openspg.server.core.schema.service.type.model.BuiltInPropertyEnum; import com.antgroup.openspg.server.core.schema.service.util.PropertyUtils; @@ -97,6 +100,22 @@ public BaseSPGType getSpgType(String uniqueName) { return spgTypeService.querySPGTypeByIdentifier(spgTypeIdentifier); } + @Override + 
public List querySPGTypeById(List uniqueIds) { + return spgTypeService.querySPGTypeById(uniqueIds); + } + + @Override + public List queryRelationByUniqueId(List uniqueIds) { + return spgTypeService.queryRelationByUniqueId(uniqueIds); + } + + @Override + public List queryPropertyByUniqueId( + List uniqueIds, SPGOntologyEnum ontologyEnum) { + return spgTypeService.queryPropertyByUniqueId(uniqueIds, ontologyEnum); + } + @Override public List getBuiltInProperty(SPGTypeEnum spgTypeEnum) { List builtInProperties = new ArrayList<>(); diff --git a/server/biz/service/src/main/java/com/antgroup/openspg/server/biz/service/impl/ReasonerManagerImpl.java b/server/biz/service/src/main/java/com/antgroup/openspg/server/biz/service/impl/ReasonerManagerImpl.java index b0d71226d..198abcbc2 100644 --- a/server/biz/service/src/main/java/com/antgroup/openspg/server/biz/service/impl/ReasonerManagerImpl.java +++ b/server/biz/service/src/main/java/com/antgroup/openspg/server/biz/service/impl/ReasonerManagerImpl.java @@ -27,9 +27,11 @@ import java.util.ArrayList; import java.util.List; import java.util.UUID; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +@Slf4j @Service public class ReasonerManagerImpl implements ReasonerManager { @Autowired private ProjectManager projectManager; @@ -68,6 +70,7 @@ public ReasonerTaskResponse reason(ReasonerTaskRequest request) { reasonerTask.setParams(request.getParams()); reasonerTask.setProjectId(request.getProjectId()); reasonerTask.setGraphStoreUrl(graphStoreUrl); + log.info("dsl: " + request.getDsl()); ReasonerTask ret = reasonerService.runTask(reasonerTask); ReasonerTaskResponse reasonerTaskResponse = new ReasonerTaskResponse(); reasonerTaskResponse.setProjectId(request.getProjectId()); diff --git a/server/biz/service/src/main/java/com/antgroup/openspg/server/biz/service/impl/SearchManagerImpl.java 
b/server/biz/service/src/main/java/com/antgroup/openspg/server/biz/service/impl/SearchManagerImpl.java index f4c2ba699..fe5eb025d 100644 --- a/server/biz/service/src/main/java/com/antgroup/openspg/server/biz/service/impl/SearchManagerImpl.java +++ b/server/biz/service/src/main/java/com/antgroup/openspg/server/biz/service/impl/SearchManagerImpl.java @@ -24,13 +24,16 @@ import com.antgroup.openspg.server.api.facade.dto.service.request.VectorSearchRequest; import com.antgroup.openspg.server.biz.common.ProjectManager; import com.antgroup.openspg.server.biz.service.SearchManager; +import com.antgroup.openspgapp.common.util.utils.SpgAppConstant; import com.google.common.collect.Lists; import java.util.Arrays; import java.util.List; +import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.ArrayUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +@Slf4j @Service public class SearchManagerImpl implements SearchManager { @@ -63,10 +66,18 @@ public List textSearch(TextSearchRequest request) { String[] labels = new String[request.getLabelConstraints().size()]; labelConstraints = Arrays.asList(request.getLabelConstraints().toArray(labels)); } - int topk = -1; - if (request.getTopk() != null && request.getTopk() > 0) topk = request.getTopk(); + int page = + (null == request.getPage() || request.getPage() <= 0) + ? SpgAppConstant.DEFAULT_PAGE + : request.getPage(); + int topk = + (null == request.getTopk() || request.getTopk() <= 0) + ? 
SpgAppConstant.DEFAULT_PAGE_SIZE + : request.getTopk(); + page = (page - 1) * topk; SearchRequest searchRequest = new SearchRequest(); searchRequest.setQuery(new FullTextSearchQuery(queryString, labelConstraints)); + searchRequest.setFrom(page); searchRequest.setSize(topk); return searchEngineClient.search(searchRequest); } diff --git a/server/common/model/pom.xml b/server/common/model/pom.xml index 392b29640..7725cef82 100644 --- a/server/common/model/pom.xml +++ b/server/common/model/pom.xml @@ -39,5 +39,9 @@ org.projectlombok lombok + + com.alibaba + fastjson + diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/CommonConstants.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/CommonConstants.java index 9427f9c02..09a981a6f 100644 --- a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/CommonConstants.java +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/CommonConstants.java @@ -20,5 +20,6 @@ public class CommonConstants { public static final String VECTORIZER = "vectorizer"; public static final String VECTOR_DIMENSIONS = "vector_dimensions"; public static final String LLM = "llm"; + public static final String LLM_SELECT = "llm_select"; public static final String PROMPT = "prompt"; } diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/CommonEnum.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/CommonEnum.java new file mode 100644 index 000000000..b0f6125f9 --- /dev/null +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/CommonEnum.java @@ -0,0 +1,104 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.common.model; + +/** all Common enum */ +public interface CommonEnum { + + /** Status Enum */ + enum Status { + ENABLE, + DISABLE + } + + enum DataSourceCategory { + BATCH, + STREAM, + FILE + } + + enum DataSourceType { + + /** ODPS */ + ODPS("ODPS", DataSourceCategory.BATCH, null), + /** Hive */ + HIVE("Hive", DataSourceCategory.BATCH, "org.apache.hive.jdbc.HiveDriver"), + /** MySQL */ + MYSQL("MySQL", DataSourceCategory.BATCH, "com.mysql.jdbc.Driver"), + /** Oracle */ + ORACLE("Oracle", DataSourceCategory.BATCH, "oracle.jdbc.driver.OracleDriver"), + /** PostgreSQL */ + POSTGRESQL("PostgreSQL", DataSourceCategory.BATCH, "org.postgresql.Driver"), + /** DB2 */ + DB2("DB2", DataSourceCategory.BATCH, "com.ibm.db2.jdbc.app.DB2Driver"), + /** MariaDB */ + MARIA_DB("MariaDB", DataSourceCategory.BATCH, "org.mariadb.jdbc.Driver"), + /** MS Sql */ + MS_SQL("MS Sql", DataSourceCategory.BATCH, "com.microsoft.sqlserver.jdbc.SQLServerDriver"), + /** SLS */ + SLS("SLS", DataSourceCategory.STREAM, null), + /** Kafka */ + KAFKA("Kafka", DataSourceCategory.STREAM, null), + /** MQ */ + SOFA_MQ("SofaMQ", DataSourceCategory.STREAM, null), + /** LIGHT_DRC */ + LIGHT_DRC("LIGHT", DataSourceCategory.STREAM, null), + /** ONS */ + ONS("ONS", DataSourceCategory.STREAM, null), + /** CSV */ + CSV("CSV", DataSourceCategory.FILE, null), + /** Text */ + TEXT("Text", DataSourceCategory.FILE, null), + /** json */ + JSON("JSON", DataSourceCategory.FILE, null), + /** parquet */ + PARQUET("Parquet", DataSourceCategory.FILE, null), + /** orc */ + ORC("Orc", DataSourceCategory.FILE, null), + /** avro */ + AVRO("Avro", DataSourceCategory.FILE, null); + + 
private String name; + + private DataSourceCategory category; + + private String driver; + + DataSourceType(String name, DataSourceCategory category, String driver) { + this.name = name; + this.category = category; + this.driver = driver; + } + + public String getName() { + return name; + } + + public DataSourceCategory getCategory() { + return category; + } + + public String getDriver() { + return driver; + } + + public static DataSourceType toEnum(String name) { + for (DataSourceType type : DataSourceType.values()) { + if (type.name().equalsIgnoreCase(name)) { + return type; + } + } + return null; + } + } +} diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/account/Account.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/account/Account.java new file mode 100644 index 000000000..448e0e53a --- /dev/null +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/account/Account.java @@ -0,0 +1,85 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.common.model.account; + +import com.antgroup.openspg.server.common.model.base.BaseModel; +import java.util.Date; +import java.util.List; +import lombok.Data; + +@Data +public class Account extends BaseModel { + + private Long id; + private String workNo; + private String token; + private String salt; + private String realName; + private String nickName; + private String account; + private String password; + private String confirmPassword; + private String email; + private Date gmtCreate; + private Date gmtModified; + private String config; + private String useCurrentLanguage; + private List roleNames; + + public Account() {} + + public Account( + Long id, + String workNo, + String realName, + String nickName, + String account, + String email, + Date gmtCreate, + Date gmtModified, + String config, + String useCurrentLanguage) { + this.id = id; + this.workNo = workNo; + this.realName = realName; + this.nickName = nickName; + this.account = account; + this.email = email; + this.gmtCreate = gmtCreate; + this.gmtModified = gmtModified; + this.config = config; + this.useCurrentLanguage = useCurrentLanguage; + } + + public Account( + Long id, + String workNo, + String realName, + String nickName, + String account, + String email, + String salt, + String config, + String useCurrentLanguage) { + this.id = id; + this.workNo = workNo; + this.realName = realName; + this.nickName = nickName; + this.account = account; + this.email = email; + this.salt = salt; + this.config = config; + this.useCurrentLanguage = useCurrentLanguage; + } +} diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/bulider/BuilderJob.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/bulider/BuilderJob.java new file mode 100644 index 000000000..57fbd00bc --- /dev/null +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/bulider/BuilderJob.java @@ -0,0 +1,45 @@ +/* + * 
Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.common.model.bulider; + +import com.antgroup.openspg.server.common.model.base.BaseModel; +import java.util.Date; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class BuilderJob extends BaseModel { + + private static final long serialVersionUID = 8873542124566103571L; + + private Long id; + private Long projectId; + private Date gmtCreate; + private Date gmtModified; + private String modifyUser; + private String createUser; + private Long taskId; + private String jobName; + private Long chunkNum; + private String fileUrl; + private String status; + private String type; + private String extension; + private String version; + private String cron; + private String pipeline; + private String computingConf; + private String lifeCycle; + private String action; +} diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/bulider/BuilderJobQuery.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/bulider/BuilderJobQuery.java new file mode 100644 index 000000000..4e08a61b2 --- /dev/null +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/bulider/BuilderJobQuery.java @@ -0,0 +1,41 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.common.model.bulider; + +import java.util.Date; +import java.util.List; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class BuilderJobQuery extends BuilderJob { + + private static final long serialVersionUID = 7320973107956820414L; + + private List ids; + + private Date startCreateTime; + + private Date endCreateTime; + + private Integer pageNo; + + private Integer pageSize; + + private String sort; + + private String order; + + private String keyword; +} diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/config/Config.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/config/Config.java new file mode 100644 index 000000000..74884a57e --- /dev/null +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/config/Config.java @@ -0,0 +1,74 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.common.model.config; + +import com.antgroup.openspg.server.common.model.base.BaseModel; +import java.util.Date; +import lombok.Data; + +@Data +public class Config extends BaseModel { + /** primary key */ + private Long id; + + /** create time */ + private Date gmtCreate; + + /** update time */ + private Date gmtModified; + + /** creator userNo */ + private String userNo; + + /** Project ID, which can be a unique value for a certain domain. */ + private String projectId; + + /** config name */ + private String configName; + + /** config id */ + private String configId; + + /** config version */ + private String version; + + /** Status, 1: Offline status (default) 2: Online */ + private Integer status; + + /** config json */ + private String config; + + /** version description */ + private String description; + + /** Resource ID, used for foreign key association with the schem view. */ + private String resourceId; + + /** resource type */ + private String resourceType; + + private boolean showProfilePicture = false; + + private boolean showUserConfig = false; + + public Config() {} + + public Config(Long id, String configName, String configId, String config, String resourceType) { + this.id = id; + this.configName = configName; + this.configId = configId; + this.config = config; + this.resourceType = resourceType; + } +} diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/data/DataRecord.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/data/DataRecord.java new file mode 100644 index 000000000..fe6d90526 --- /dev/null +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/data/DataRecord.java @@ -0,0 +1,32 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.common.model.data; + +import com.antgroup.openspg.server.common.model.base.BaseModel; +import java.util.Map; +import lombok.Data; + +@Data +public class DataRecord extends BaseModel { + + private String name; + + private String docId; + + private double score; + + private String label; + + private Map fields; +} diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/data/EntitySampleData.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/data/EntitySampleData.java new file mode 100644 index 000000000..619ccc7c9 --- /dev/null +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/data/EntitySampleData.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.common.model.data; + +import java.util.Map; +import lombok.Data; + +/** sample data of entity */ +@Data +public class EntitySampleData { + + /** entity data id */ + private String id; + + /** entity type name */ + private String label; + + /** entity properties */ + private Map properties; +} diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/datasource/Column.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/datasource/Column.java new file mode 100644 index 000000000..75157af54 --- /dev/null +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/datasource/Column.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.common.model.datasource; + +import com.antgroup.openspg.server.common.model.base.BaseModel; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class Column extends BaseModel { + + private static final long serialVersionUID = 2897040705844372408L; + + private String name; + private String type; + private String comment; + + public Column() {} + + public Column(String name, String type, String comment) { + this.name = name; + this.type = type; + this.comment = comment; + } +} diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/datasource/DataSource.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/datasource/DataSource.java new file mode 100644 index 000000000..9425961e3 --- /dev/null +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/datasource/DataSource.java @@ -0,0 +1,54 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
/**
 * A registered external data source (relational DB, warehouse, etc.) and its connection settings.
 */
@Getter
@Setter
public class DataSource extends BaseModel {

  private static final long serialVersionUID = -2573367617071842562L;

  /** primary key */
  private Long id;

  /** create time */
  private Date gmtCreate;

  /** update time */
  private Date gmtModified;

  /** creator */
  private String createUser;

  /** last modifier */
  private String updateUser;

  /** record status */
  private Status status;

  /** free-form remark */
  private String remark;

  /** data source type; also provides the default JDBC driver (see type.getDriver() in service) */
  private DataSourceType type;

  /** database name */
  private String dbName;

  /** connection URL */
  private String dbUrl;

  /** connection user */
  private String dbUser;

  // NOTE: after DataSourceServiceImpl.insert/update runs, this holds the ECB-encrypted
  // password, identical to 'encrypt' below.
  /** connection password (stored encrypted) */
  private String dbPassword;

  /** encrypted copy of the password */
  private String encrypt;

  /** JDBC driver class name */
  private String dbDriverName;

  /** data source category */
  private DataSourceCategory category;

  /** extra connection parameters as JSON */
  private JSONObject connectionInfo;

  /** No-arg constructor for serialization/ORM frameworks. */
  public DataSource() {}

  /**
   * Convenience constructor with the minimal connection triple.
   *
   * @param dbUrl connection URL
   * @param dbUser connection user
   * @param dbPassword connection password (plaintext at this point)
   */
  public DataSource(String dbUrl, String dbUser, String dbPassword) {
    this.dbUrl = dbUrl;
    this.dbUser = dbUser;
    this.dbPassword = dbPassword;
  }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.common.model.datasource; + +import java.util.Date; +import java.util.List; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class DataSourceQuery extends DataSource { + + private static final long serialVersionUID = 1433710115349759371L; + + private List ids; + + private Date startCreateTime; + + private Date endCreateTime; + + private Integer pageNo; + + private Integer pageSize; + + private String sort; + + private String order; +} diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/job/SubGraph.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/job/SubGraph.java index 349079e87..642a5093c 100644 --- a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/job/SubGraph.java +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/job/SubGraph.java @@ -29,6 +29,7 @@ public class SubGraph extends BaseModel { private List resultNodes; private List resultEdges; + private String className; @Data public static class Node extends BaseModel { diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/permission/Permission.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/permission/Permission.java new file mode 100644 index 000000000..a1cc995ea --- /dev/null +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/permission/Permission.java @@ -0,0 +1,45 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in 
/**
 * A permission grant: binds a user ({@code userNo}) to a role ({@code roleId}) on a resource
 * ({@code resourceId} tagged by {@code resourceTag}).
 */
@Data
public class Permission extends BaseModel {

  /** primary key */
  private Long id;

  /** the user this grant applies to */
  private String userNo;

  /** id of the resource being protected */
  private Long resourceId;

  /** resource category tag */
  private String resourceTag;

  /** id of the granted role */
  private Long roleId;

  /** role type name */
  private String roleType;

  /** display name of the user */
  private String userName;

  /** resolved role details for this account */
  private AccountRoleInfo accountRoleInfo;

  /** No-arg constructor for serialization/ORM frameworks. */
  public Permission() {}

  /** Constructor used when only the primary key is known (e.g. delete by id). */
  public Permission(Long id) {
    this.id = id;
  }

  /**
   * Creates a full permission grant.
   *
   * @param id primary key
   * @param userNo user identifier
   * @param resourceId protected resource id
   * @param resourceTag resource category tag
   * @param roleId granted role id
   */
  public Permission(Long id, String userNo, Long resourceId, String resourceTag, Long roleId) {
    this.id = id;
    this.userNo = userNo;
    this.resourceId = resourceId;
    this.resourceTag = resourceTag;
    this.roleId = roleId;
  }
}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.common.model.project; + +import com.antgroup.openspg.server.common.model.base.BaseModel; +import lombok.Data; + +/** @version UserRoleInfo.java, v 0.1 2024年11月26日 上午11:41 */ +@Data +public class AccountRoleInfo extends BaseModel { + + /** Specific Role Name */ + String roleName; + + /** Permission Detail Information */ + String permissionDetail; + + public AccountRoleInfo() {} + + public AccountRoleInfo(String roleName) { + this.roleName = roleName; + } +} diff --git a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/scheduler/SchedulerEnum.java b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/scheduler/SchedulerEnum.java index 4ff5e9fd8..0089f4d24 100644 --- a/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/scheduler/SchedulerEnum.java +++ b/server/common/model/src/main/java/com/antgroup/openspg/server/common/model/scheduler/SchedulerEnum.java @@ -76,9 +76,16 @@ public static boolean isRunning(TaskStatus status) { } } + enum SchedulerInfoStatus { + WAIT, + RUNNING, + STOP + } + /** Translate Enum */ enum TranslateType { - LOCAL_EXAMPLE("localExampleTranslate"); + LOCAL_EXAMPLE("localExampleTranslate"), + KAG_BUILDER("kagBuilderTranslate"); private String type; diff --git a/server/common/service/pom.xml b/server/common/service/pom.xml index 497f2651f..001871bb6 100644 --- a/server/common/service/pom.xml +++ b/server/common/service/pom.xml @@ -60,5 +60,87 @@ org.apache.commons commons-collections4 + + com.github.rholder + guava-retrying + + + + + org.apache.hive + hive-jdbc + 3.1.0 + + + org.mortbay.jetty + servlet-api-2.5 + + + 
javax.servlet + servlet-api + + + org.eclipse.jetty + jetty-runner + + + org.apache.hadoop + hadoop-yarn-server-resourcemanager + + + org.apache.hbase + hbase-client + + + org.apache.hbase + hbase-common + + + org.apache.hbase + hbase-server + + + com.sun.jersey + jersey-core + + + org.apache.logging.log4j + log4j-web + + + org.apache.logging.log4j + log4j-1.2-api + + + org.apache.logging.log4j + log4j-slf4j-impl + + + org.slf4j + slf4j-log4j12 + + + + + mysql + mysql-connector-java + 5.1.30 + + + com.oracle.database.jdbc + ojdbc8 + 21.1.0.0 + + + com.aliyun.odps + odps-sdk-core + 0.36.4-public + + + com.aliyun.odps + odps-sdk-commons + 0.36.4-public + + diff --git a/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/account/AccountRepository.java b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/account/AccountRepository.java new file mode 100644 index 000000000..34d0f684e --- /dev/null +++ b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/account/AccountRepository.java @@ -0,0 +1,108 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
/** account repository */
public interface AccountRepository {

  /**
   * Inserts an account.
   *
   * @param account the account to persist
   * @return number of rows inserted
   */
  Integer insert(Account account);

  /**
   * Updates an account.
   *
   * @param account the account carrying the new values
   * @return number of rows updated
   */
  Integer update(Account account);

  /**
   * Updates an account identified by its userNo.
   *
   * @param record the account carrying the userNo and the new values
   * @return number of rows updated
   */
  Integer updateByUserNo(Account record);

  /**
   * Deletes the account with the given userNo.
   *
   * @param userNo user identifier
   * @return number of rows deleted
   */
  Integer deleteByUserNo(String userNo);

  /**
   * Selects the account with the given userNo.
   *
   * @param userNo user identifier
   * @return the account, or null if not found
   */
  Account selectByUserNo(String userNo);

  /**
   * Selects the account with the given userNo, including private info.
   *
   * @param userNo user identifier
   * @return the account with private fields populated, or null if not found
   */
  Account selectWithPrivateByUserNo(String userNo);

  /**
   * Queries accounts by a partial match on userNo, nickName or realName.
   *
   * @param keyword search keyword
   * @return matching accounts
   */
  List query(String keyword);

  /**
   * Returns a page of accounts.
   *
   * @param loginAccount login account filter
   * @param page 1-based page number
   * @param size page size
   * @return a page of accounts
   */
  Paged getAccountList(String loginAccount, Integer page, Integer size);

  /**
   * Batch-loads simple account info for a set of userNos.
   *
   * @param userNos user identifiers
   * @return simple accounts for the given userNos
   */
  List getSimpleAccountByUserNoList(Collection userNos);

  /**
   * Updates the per-user config blob.
   *
   * @param userNo user identifier
   * @param config new config payload
   * @return number of rows updated
   */
  int updateUserConfig(String userNo, String config);
}
/** Repository for persisting and querying builder jobs. */
public interface BuilderJobRepository {

  /**
   * Inserts a builder job.
   *
   * @param record the job to persist
   * @return the generated primary key
   */
  Long insert(BuilderJob record);

  /**
   * Deletes the job with the given id.
   *
   * @param id primary key
   * @return number of rows deleted
   */
  int deleteById(Long id);

  /**
   * Updates a builder job.
   *
   * @param record the job carrying the id and new values
   * @return number of rows updated
   */
  Long update(BuilderJob record);

  /**
   * Loads the job with the given id.
   *
   * @param id primary key
   * @return the job, or null if not found
   */
  BuilderJob getById(Long id);

  /**
   * Queries jobs by condition with paging.
   *
   * @param record query condition
   * @return a page of matching jobs
   */
  Paged query(BuilderJobQuery record);
}
/**
 * Service facade over {@link BuilderJob} persistence; mirrors the repository contract.
 *
 * <p>NOTE(review): deleteById returns primitive int while insert/update return Long — inherited
 * from the repository; kept as-is for interface compatibility.
 */
public interface BuilderJobService {

  /**
   * Inserts a builder job.
   *
   * @param record the job to persist
   * @return the generated primary key
   */
  Long insert(BuilderJob record);

  /**
   * Deletes the job with the given id.
   *
   * @param id primary key
   * @return number of rows deleted
   */
  int deleteById(Long id);

  /**
   * Updates a builder job.
   *
   * @param record the job carrying the id and new values
   * @return number of rows updated
   */
  Long update(BuilderJob record);

  /**
   * Loads the job with the given id.
   *
   * @param id primary key
   * @return the job, or null if not found
   */
  BuilderJob getById(Long id);

  /**
   * Queries jobs by condition with paging.
   *
   * @param record query condition
   * @return a page of matching jobs
   */
  Paged query(BuilderJobQuery record);
}
/**
 * Default {@link BuilderJobService} implementation: a thin delegation layer over
 * {@link BuilderJobRepository} with no additional business logic.
 */
@Service
@Slf4j
public class BuilderJobServiceImpl implements BuilderJobService {

  @Autowired private BuilderJobRepository builderJobRepository;

  /** Delegates insertion to the repository; returns the generated primary key. */
  @Override
  public Long insert(BuilderJob record) {
    return builderJobRepository.insert(record);
  }

  /** Delegates deletion by id; returns the number of rows deleted. */
  @Override
  public int deleteById(Long id) {
    return builderJobRepository.deleteById(id);
  }

  /** Delegates update to the repository; returns the number of rows updated. */
  @Override
  public Long update(BuilderJob record) {
    return builderJobRepository.update(record);
  }

  /** Loads a job by primary key via the repository. */
  @Override
  public BuilderJob getById(Long id) {
    return builderJobRepository.getById(id);
  }

  /** Delegates a paged conditional query to the repository. */
  @Override
  public Paged query(BuilderJobQuery record) {
    return builderJobRepository.query(record);
  }
}
/** global config repository */
public interface ConfigRepository {

  /**
   * Queries a global config by its logical id and version.
   *
   * @param configId logical config identifier
   * @param version config version
   * @return the matching config, or null if not found
   */
  Config query(String configId, String version);

  /**
   * Loads a config by primary key.
   *
   * @param id primary key
   * @return the config, or null if not found
   */
  Config getById(Long id);

  /**
   * Saves a new global config.
   *
   * @param config the config to persist
   * @return number of rows inserted
   */
  Integer save(Config config);

  /**
   * Updates an existing global config.
   *
   * @param config the config carrying the id and new values
   * @return number of rows updated
   */
  Integer update(Config config);
}
/**
 * Default configuration values injected from application properties. Each field falls back to an
 * empty string (or the stated default) when the property is absent.
 */
@ToString
@Getter
@Component
public class DefaultValue {

  /** graph store connection URL */
  @Value("${cloudext.graphstore.url:}")
  private String graphStoreUrl;

  /** search engine connection URL */
  @Value("${cloudext.searchengine.url:}")
  private String searchEngineUrl;

  /** object storage connection URL */
  @Value("${cloudext.objectstorage.url:}")
  private String objectStorageUrl;

  /** computing engine connection URL */
  @Value("${cloudext.computingengine.url:}")
  private String computingEngineUrl;

  /** schema service host URI */
  @Value("${schema.uri:}")
  private String schemaUrlHost;

  // presumably the parallelism for model execution in the builder — TODO confirm with callers
  /** builder model execution count, defaults to 5 */
  @Value("${builder.model.execute.num:5}")
  private Integer modelExecuteNum;

  /** path of the python executable */
  @Value("${python.exec:}")
  private String pythonExec;

  /** python module search paths */
  @Value("${python.paths:}")
  private String pythonPaths;

  /** object storage bucket used by the builder */
  @Value("${objectStorage.builder.bucketName:}")
  private String builderBucketName;
}
/** Repository for persisting and querying data source records. */
public interface DataSourceRepository {

  /**
   * Inserts a data source.
   *
   * @param record the data source to persist
   * @return the generated primary key
   */
  Long insert(DataSource record);

  /**
   * Deletes the data source with the given id.
   *
   * @param id primary key
   * @return number of rows deleted
   */
  int deleteById(Long id);

  /**
   * Updates a data source.
   *
   * @param record the data source carrying the id and new values
   * @return number of rows updated
   */
  Long update(DataSource record);

  /**
   * Loads the data source with the given id.
   *
   * @param id primary key
   * @return the data source, or null if not found
   */
  DataSource getById(Long id);

  /**
   * Queries data sources by condition with paging.
   *
   * @param record query condition
   * @return a page of matching data sources
   */
  Paged query(DataSourceQuery record);

  /**
   * Groups matching data sources by their type.
   *
   * @param record query condition
   * @return one representative per type
   */
  List getGroupByType(DataSourceQuery record);
}
/**
 * Service for managing data sources: CRUD plus live metadata inspection (databases, tables,
 * columns) and connectivity tests against the underlying store.
 */
public interface DataSourceService {

  /**
   * Inserts a data source (encrypting the password and testing the connection first).
   *
   * @param record the data source to persist
   * @return the generated primary key
   */
  Long insert(DataSource record);

  /**
   * Deletes the data source with the given id.
   *
   * @param id primary key
   * @return number of rows deleted
   */
  int deleteById(Long id);

  /**
   * Updates a data source.
   *
   * @param record the data source carrying the id and new values
   * @return number of rows updated
   */
  Long update(DataSource record);

  /**
   * Loads the data source with the given id.
   *
   * @param id primary key
   * @return the data source, or null if not found
   */
  DataSource getById(Long id);

  /**
   * Queries data sources by condition with paging.
   *
   * @param record query condition
   * @return a page of matching data sources
   */
  Paged query(DataSourceQuery record);

  /**
   * Groups matching data sources by their type.
   *
   * @param record query condition
   * @return one representative per type
   */
  List getGroupByType(DataSourceQuery record);

  /** Lists all database names reachable through the data source with the given id. */
  List getAllDatabase(Long id);

  /** Lists table names in the given database, optionally filtered by keyword. */
  List getAllTable(Long id, String database, String keyword);

  /** Describes the columns of the given table. */
  List getTableDetail(Long id, String database, String table);

  /** Tests connectivity of the given (not necessarily persisted) data source. */
  Boolean testConnect(DataSource record);

  /** Lists available data source types for the given category. */
  List getDataSourceType(String category);
}
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */
package com.antgroup.openspg.server.common.service.datasource.impl;

import com.antgroup.openspg.common.util.ECBUtil;
import com.antgroup.openspg.common.util.StringUtils;
import com.antgroup.openspg.common.util.constants.CommonConstant;
import com.antgroup.openspg.server.api.facade.Paged;
import com.antgroup.openspg.server.common.model.CommonEnum.DataSourceType;
import com.antgroup.openspg.server.common.model.datasource.Column;
import com.antgroup.openspg.server.common.model.datasource.DataSource;
import com.antgroup.openspg.server.common.model.datasource.DataSourceQuery;
import com.antgroup.openspg.server.common.service.datasource.DataSourceRepository;
import com.antgroup.openspg.server.common.service.datasource.DataSourceService;
import com.antgroup.openspg.server.common.service.datasource.meta.DataSourceMeta;
import com.antgroup.openspg.server.common.service.datasource.meta.client.CloudDataSource;
import com.google.common.collect.Lists;
import java.util.List;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;

/**
 * Default {@link DataSourceService} implementation: persists records through {@link
 * DataSourceRepository} and validates connectivity / browses metadata through {@link
 * DataSourceMeta}. Passwords are stored ECB-encrypted, never in clear text.
 */
@Service
@Slf4j
public class DataSourceServiceImpl implements DataSourceService {

  @Autowired private DataSourceRepository dataSourceRepository;
  @Autowired private DataSourceMeta dataSourceMeta;

  @Override
  public Long insert(DataSource record) {
    fillDefaultDriver(record);
    encryptPassword(record);
    assertConnectable(record);
    return dataSourceRepository.insert(record);
  }

  @Override
  public int deleteById(Long id) {
    return dataSourceRepository.deleteById(id);
  }

  @Override
  public Long update(DataSource record) {
    fillDefaultDriver(record);
    setEncrypt(record);
    assertConnectable(record);
    return dataSourceRepository.update(record);
  }

  /** Falls back to the driver class associated with the data source type when none is given. */
  private void fillDefaultDriver(DataSource record) {
    if (StringUtils.isBlank(record.getDbDriverName())) {
      record.setDbDriverName(record.getType().getDriver());
    }
  }

  /** Encrypts the clear-text password and stores the ciphertext in both password fields. */
  private void encryptPassword(DataSource record) {
    String encrypt = ECBUtil.encrypt(record.getDbPassword(), CommonConstant.ECB_PASSWORD_KEY);
    record.setDbPassword(encrypt);
    record.setEncrypt(encrypt);
  }

  /** Verifies that the data source is reachable before it is persisted. */
  private void assertConnectable(DataSource record) {
    Boolean flag = dataSourceMeta.testConnect(CloudDataSource.toCloud(record));
    Assert.isTrue(flag, "Connection test failed");
  }

  /**
   * Resolves the password to store on update: when the client sent back the masked placeholder
   * password, the previously stored ciphertext is kept; otherwise the new password is encrypted.
   */
  private void setEncrypt(DataSource record) {
    if (CommonConstant.DEFAULT_PASSWORD.equals(record.getDbPassword())) {
      DataSource sourceDTO = dataSourceRepository.getById(record.getId());
      record.setDbPassword(sourceDTO.getDbPassword());
      record.setEncrypt(sourceDTO.getEncrypt());
    } else {
      encryptPassword(record);
    }
  }

  @Override
  public DataSource getById(Long id) {
    return dataSourceRepository.getById(id);
  }

  @Override
  public Paged query(DataSourceQuery record) {
    return dataSourceRepository.query(record);
  }

  @Override
  public List getGroupByType(DataSourceQuery record) {
    return dataSourceRepository.getGroupByType(record);
  }

  @Override
  public List getAllDatabase(Long id) {
    return dataSourceMeta.showDatabases(CloudDataSource.toCloud(getById(id)));
  }

  @Override
  public List getAllTable(Long id, String database, String keyword) {
    return dataSourceMeta.showTables(CloudDataSource.toCloud(getById(id)), database, keyword);
  }

  @Override
  public List getTableDetail(Long id, String database, String table) {
    return dataSourceMeta.describeTable(CloudDataSource.toCloud(getById(id)), database, table);
  }

  @Override
  public Boolean testConnect(DataSource record) {
    return dataSourceMeta.testConnect(CloudDataSource.toCloud(record));
  }

  @Override
  public List getDataSourceType(String category) {
    List types = Lists.newArrayList();
    for (DataSourceType type : DataSourceType.values()) {
      // A blank category means "all"; otherwise keep only types of the requested category.
      if (StringUtils.isNotBlank(category)
          && !type.getCategory().name().equalsIgnoreCase(category)) {
        continue;
      }
      types.add(new Column(type.name(), type.getCategory().name(), type.getName()));
    }
    return types;
  }
}
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */
package com.antgroup.openspg.server.common.service.datasource.meta;

import com.antgroup.openspg.server.common.model.datasource.Column;
import com.antgroup.openspg.server.common.service.datasource.meta.client.CloudDataSource;
import com.antgroup.openspg.server.common.service.datasource.meta.client.DataSourceMetaClient;
import com.antgroup.openspg.server.common.service.datasource.meta.client.DataSourceMetaFactory;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;

/**
 * Facade over the type-specific {@link DataSourceMetaClient} implementations. Each call validates
 * its arguments, dispatches to the client matching the data source type, and logs elapsed time.
 *
 * <p>Log statements use SLF4J {@code {}} placeholders (the original bare-message form silently
 * dropped all arguments) and log only the JDBC URL, never the full data source object — the cloud
 * model carries the decrypted password.
 */
@Component
@Slf4j
public class DataSourceMeta {

  /** Describes the columns of {@code database.tableName} on the given data source. */
  public List describeTable(CloudDataSource dataSource, String database, String tableName) {
    Assert.notNull(dataSource, "datasource");
    Assert.notNull(database, "database");
    Assert.notNull(tableName, "table");

    long start = System.currentTimeMillis();
    DataSourceMetaClient client = DataSourceMetaFactory.getInstance(dataSource.getType());
    log.info("describeTable start: {} {} {}", dataSource.getDbUrl(), database, tableName);
    List columns = client.describeTable(dataSource, database, tableName);
    long cost = System.currentTimeMillis() - start;
    log.info("describeTable end: {} {} cost={}ms", database, tableName, cost);

    return columns;
  }

  /** Lists all database names reachable through the data source. */
  public List showDatabases(CloudDataSource dataSource) {
    Assert.notNull(dataSource, "datasource");

    long start = System.currentTimeMillis();
    DataSourceMetaClient client = DataSourceMetaFactory.getInstance(dataSource.getType());
    log.info("showDatabases start: {}", dataSource.getDbUrl());
    List dbs = client.showDatabases(dataSource);
    long cost = System.currentTimeMillis() - start;
    log.info("showDatabases end: cost={}ms", cost);

    return dbs;
  }

  /** Lists table names in {@code database}, optionally filtered by {@code keyword}. */
  public List showTables(CloudDataSource dataSource, String database, String keyword) {
    Assert.notNull(dataSource, "datasource");
    Assert.notNull(database, "database");

    long start = System.currentTimeMillis();
    DataSourceMetaClient client = DataSourceMetaFactory.getInstance(dataSource.getType());
    log.info("showTables start: {} {}", dataSource.getDbUrl(), database);
    List tables = client.showTables(dataSource, database, keyword);
    long cost = System.currentTimeMillis() - start;
    log.info("showTables end: {} cost={}ms", database, cost);

    return tables;
  }

  /** Returns whether {@code database.tableName} is a partitioned table. */
  public Boolean isPartitionTable(CloudDataSource dataSource, String database, String tableName) {
    Assert.notNull(dataSource, "datasource");
    Assert.notNull(database, "database");
    Assert.notNull(tableName, "table");

    long start = System.currentTimeMillis();
    DataSourceMetaClient client = DataSourceMetaFactory.getInstance(dataSource.getType());
    log.info("isPartitionTable start: {} {} {}", dataSource.getDbUrl(), database, tableName);
    Boolean isPartition = client.isPartitionTable(dataSource, database, tableName);
    long cost = System.currentTimeMillis() - start;
    log.info("isPartitionTable end: {} {} cost={}ms", database, tableName, cost);

    return isPartition;
  }

  /** Tests whether a connection can be established to the data source. */
  public Boolean testConnect(CloudDataSource dataSource) {
    Assert.notNull(dataSource, "datasource");

    long start = System.currentTimeMillis();
    DataSourceMetaClient client = DataSourceMetaFactory.getInstance(dataSource.getType());
    log.info("testConnect start: {}", dataSource.getDbUrl());
    Boolean flag = client.testConnect(dataSource);
    long cost = System.currentTimeMillis() - start;
    log.info("testConnect end: cost={}ms", cost);

    return flag;
  }

  /**
   * Samples up to {@code limit} rows from each partition resolved from {@code partitionStr} and
   * {@code bizDate} on table {@code dataSourceId} (database.table).
   */
  public List<Map<String, Object>> sampleDateForPartition(
      CloudDataSource dataSource,
      String dataSourceId,
      String partitionStr,
      String bizDate,
      Integer limit) {
    Assert.notNull(dataSource, "datasource");
    Assert.notNull(dataSourceId, "database.table");
    Assert.notNull(partitionStr, "partition");
    Assert.notNull(limit, "limit");

    long start = System.currentTimeMillis();
    DataSourceMetaClient client = DataSourceMetaFactory.getInstance(dataSource.getType());

    log.info(
        "sampleDateForPartition start: {} {} {} {}", dataSourceId, partitionStr, bizDate, limit);
    List<Map<String, Object>> data =
        client.sampleDateForPartition(dataSource, dataSourceId, partitionStr, bizDate, limit);
    long cost = System.currentTimeMillis() - start;
    log.info(
        "sampleDateForPartition end: {} {} {} {} cost={}ms",
        dataSourceId,
        partitionStr,
        bizDate,
        limit,
        cost);

    return data;
  }

  /** Returns whether any partition resolved from {@code partitionStr}/{@code bizDate} exists. */
  public Boolean hasPartition(
      CloudDataSource dataSource, String dataSourceId, String partitionStr, String bizDate) {
    Assert.notNull(dataSource, "datasource");
    Assert.notNull(dataSourceId, "database.table");
    Assert.notNull(partitionStr, "partition");

    long start = System.currentTimeMillis();
    DataSourceMetaClient client = DataSourceMetaFactory.getInstance(dataSource.getType());

    log.info("hasPartition start: {} {} {}", dataSourceId, partitionStr, bizDate);
    Boolean hasPartition = client.hasPartition(dataSource, dataSourceId, partitionStr, bizDate);
    long cost = System.currentTimeMillis() - start;
    log.info("hasPartition end: {} {} {} cost={}ms", dataSourceId, partitionStr, bizDate, cost);
    return hasPartition;
  }

  /** Counts records in the partitions resolved from {@code partitionStr}/{@code bizDate}. */
  public Long getRecordCount(
      CloudDataSource dataSource, String dataSourceId, String partitionStr, String bizDate) {
    Assert.notNull(dataSource, "datasource");
    Assert.notNull(dataSourceId, "database.table");
    Assert.notNull(partitionStr, "partition");

    long start = System.currentTimeMillis();
    DataSourceMetaClient client = DataSourceMetaFactory.getInstance(dataSource.getType());

    log.info("getRecordCount start: {} {} {}", dataSourceId, partitionStr, bizDate);
    Long count = client.getRecordCount(dataSource, dataSourceId, partitionStr, bizDate);
    long cost = System.currentTimeMillis() - start;
    log.info("getRecordCount end: {} {} {} cost={}ms", dataSourceId, partitionStr, bizDate, cost);
    return count;
  }

  /** Lists every partition of table {@code dataSourceId} (database.table). */
  public List getAllPartitions(CloudDataSource dataSource, String dataSourceId) {
    Assert.notNull(dataSource, "datasource");
    Assert.notNull(dataSourceId, "database.table");

    long start = System.currentTimeMillis();
    DataSourceMetaClient client = DataSourceMetaFactory.getInstance(dataSource.getType());

    log.info("getAllPartitions start: {}", dataSourceId);
    List partitions = client.getAllPartitions(dataSource, dataSourceId);
    long cost = System.currentTimeMillis() - start;
    log.info("getAllPartitions end: {} cost={}ms", dataSourceId, cost);
    return partitions;
  }
}
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */
package com.antgroup.openspg.server.common.service.datasource.meta.client;

import com.alibaba.fastjson.JSONObject;
import com.antgroup.openspg.common.util.DozerBeanMapperUtil;
import com.antgroup.openspg.common.util.ECBUtil;
import com.antgroup.openspg.common.util.constants.CommonConstant;
import com.antgroup.openspg.server.common.model.CommonEnum.DataSourceCategory;
import com.antgroup.openspg.server.common.model.CommonEnum.DataSourceType;
import com.antgroup.openspg.server.common.model.CommonEnum.Status;
import com.antgroup.openspg.server.common.model.base.BaseModel;
import com.antgroup.openspg.server.common.model.datasource.DataSource;
import java.util.Date;
import lombok.Getter;
import lombok.Setter;

/**
 * Cloud-side view of a persistent {@link DataSource}: same fields as the stored model, but with
 * {@code dbPassword} decrypted so metadata clients can open real connections. Handle with care in
 * logs.
 */
@Getter
@Setter
public class CloudDataSource extends BaseModel {

  private static final long serialVersionUID = -4123605193544774990L;

  private Long id;
  private Date gmtCreate;
  private Date gmtModified;
  private String createUser;
  private String updateUser;
  private Status status;
  private String remark;
  private DataSourceType type;
  private String dbName;
  private String dbUrl;
  private String dbUser;
  private String dbPassword;
  private String encrypt;
  private String dbDriverName;
  private DataSourceCategory category;
  private JSONObject connectionInfo;

  /**
   * Maps a persistent record to its cloud-side view, decrypting the stored password ciphertext.
   *
   * @param dataSource the persistent record; may be null
   * @return the mapped view, or null when the input is null
   */
  public static CloudDataSource toCloud(DataSource dataSource) {
    if (dataSource == null) {
      return null;
    }
    CloudDataSource cloud = DozerBeanMapperUtil.map(dataSource, CloudDataSource.class);
    cloud.setDbPassword(ECBUtil.decrypt(dataSource.getEncrypt(), CommonConstant.ECB_PASSWORD_KEY));
    return cloud;
  }
}
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */
package com.antgroup.openspg.server.common.service.datasource.meta.client;

import com.antgroup.openspg.server.common.model.datasource.Column;
import java.util.List;
import java.util.Map;

/**
 * Metadata access for a single data source type (JDBC, ODPS, ...).
 *
 * @author yangjin
 */
public interface DataSourceMetaClient {

  /**
   * Gets the column definitions of a table.
   *
   * @param dataSource data source configuration
   * @param database database name
   * @param tableName table name
   * @return the table's columns
   */
  List describeTable(CloudDataSource dataSource, String database, String tableName);

  /**
   * Lists all database names.
   *
   * @param dataSource data source configuration
   * @return the database names
   */
  List showDatabases(CloudDataSource dataSource);

  /**
   * Lists table names in a database, optionally filtered by keyword.
   *
   * @param dataSource data source configuration
   * @param database database name
   * @param keyword optional substring filter; blank means no filtering
   * @return the matching table names
   */
  List showTables(CloudDataSource dataSource, String database, String keyword);

  /**
   * Returns whether the table is partitioned.
   *
   * @param dataSource data source configuration
   * @param database database name
   * @param tableName table name
   * @return true when the table is a partitioned table
   */
  Boolean isPartitionTable(CloudDataSource dataSource, String database, String tableName);

  /**
   * Tests whether a connection can be established.
   *
   * @param dataSource data source configuration
   * @return true when the connection test succeeds
   */
  Boolean testConnect(CloudDataSource dataSource);

  /**
   * Samples rows from the given partitions.
   *
   * @param dataSource data source configuration
   * @param dataSourceId table identifier in "database.table" form
   * @param partitionStr multi-partition specification
   * @param bizDate business date of the scheduled run
   * @param limit maximum number of sample rows per partition
   * @return the sampled rows as column-name/value maps
   */
  List<Map<String, Object>> sampleDateForPartition(
      CloudDataSource dataSource,
      String dataSourceId,
      String partitionStr,
      String bizDate,
      Integer limit);

  /**
   * Checks whether any of the given partitions exists.
   *
   * @param dataSource data source configuration
   * @param dataSourceId table identifier in "database.table" form
   * @param partitionStr multi-partition specification
   * @param bizDate business date of the scheduled run
   * @return true when at least one matching partition exists
   */
  Boolean hasPartition(
      CloudDataSource dataSource, String dataSourceId, String partitionStr, String bizDate);

  /**
   * Counts records in the given partitions.
   *
   * @param dataSource data source configuration
   * @param dataSourceId table identifier in "database.table" form
   * @param partitionStr multi-partition specification
   * @param bizDate business date of the scheduled run
   * @return the total record count
   */
  Long getRecordCount(
      CloudDataSource dataSource, String dataSourceId, String partitionStr, String bizDate);

  /**
   * Lists every partition of a table.
   *
   * @param dataSource data source configuration
   * @param dataSourceId table identifier in "database.table" form
   * @return all partition specifications of the table
   */
  List getAllPartitions(CloudDataSource dataSource, String dataSourceId);
}
+ */ +package com.antgroup.openspg.server.common.service.datasource.meta.client; + +import com.antgroup.openspg.server.common.model.CommonEnum; +import com.antgroup.openspg.server.common.service.datasource.meta.client.impl.DefaultMetaClientImpl; +import com.antgroup.openspg.server.common.service.datasource.meta.client.impl.JdbcMetaClientImpl; +import com.antgroup.openspg.server.common.service.datasource.meta.client.impl.OdpsMetaClientImpl; +import org.springframework.util.Assert; + +public class DataSourceMetaFactory { + + public static DataSourceMetaClient getInstance(CommonEnum.DataSourceType type) { + Assert.notNull(type, "datasource type"); + DataSourceMetaClient dataSourceMetaClient; + switch (type) { + case HIVE: + case MYSQL: + case ORACLE: + case POSTGRESQL: + case DB2: + case MARIA_DB: + case MS_SQL: + dataSourceMetaClient = new JdbcMetaClientImpl(type.getDriver()); + break; + case ODPS: + dataSourceMetaClient = new OdpsMetaClientImpl(); + break; + default: + dataSourceMetaClient = new DefaultMetaClientImpl(); + break; + } + return dataSourceMetaClient; + } +} diff --git a/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/DefaultMetaClientImpl.java b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/DefaultMetaClientImpl.java new file mode 100644 index 000000000..c11b9ca67 --- /dev/null +++ b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/DefaultMetaClientImpl.java @@ -0,0 +1,82 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */
package com.antgroup.openspg.server.common.service.datasource.meta.client.impl;

import com.antgroup.openspg.server.common.model.datasource.Column;
import com.antgroup.openspg.server.common.service.datasource.meta.client.CloudDataSource;
import com.antgroup.openspg.server.common.service.datasource.meta.client.DataSourceMetaClient;
import java.util.List;
import java.util.Map;

/**
 * Fallback client for data source types without metadata support. Every metadata operation fails
 * with {@link UnsupportedOperationException} (a RuntimeException, so existing callers are
 * unaffected); only {@link #testConnect} succeeds.
 */
public class DefaultMetaClientImpl implements DataSourceMetaClient {

  @Override
  public List describeTable(CloudDataSource dataSource, String database, String tableName) {
    throw new UnsupportedOperationException("Not Supported");
  }

  @Override
  public List showDatabases(CloudDataSource dataSource) {
    throw new UnsupportedOperationException("Not Supported");
  }

  @Override
  public List showTables(CloudDataSource dataSource, String database, String keyword) {
    throw new UnsupportedOperationException("Not Supported");
  }

  @Override
  public Boolean isPartitionTable(CloudDataSource dataSource, String database, String tableName) {
    throw new UnsupportedOperationException("Not Supported");
  }

  @Override
  public Boolean testConnect(CloudDataSource dataSource) {
    // Nothing to probe for unsupported types; always reports success (the original try/catch
    // around "return Boolean.TRUE" was dead code and has been removed).
    return Boolean.TRUE;
  }

  @Override
  public List<Map<String, Object>> sampleDateForPartition(
      CloudDataSource dataSource,
      String dataSourceId,
      String partitionStr,
      String bizDate,
      Integer limit) {
    throw new UnsupportedOperationException("Not Supported");
  }

  @Override
  public Boolean hasPartition(
      CloudDataSource dataSource, String dataSourceId, String partitionStr, String bizDate) {
    throw new UnsupportedOperationException("Not Supported");
  }

  @Override
  public Long getRecordCount(
      CloudDataSource dataSource, String dataSourceId, String partitionStr, String bizDate) {
    throw new UnsupportedOperationException("Not Supported");
  }

  @Override
  public List getAllPartitions(CloudDataSource dataSource, String dataSourceId) {
    throw new UnsupportedOperationException("Not Supported");
  }
}
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */
package com.antgroup.openspg.server.common.service.datasource.meta.client.impl;

import com.antgroup.openspg.server.common.service.datasource.meta.client.CloudDataSource;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;

/**
 * Thin helpers around {@link DriverManager} for opening and closing JDBC resources.
 *
 * <p>Close helpers are null-safe and idempotent; failures are wrapped in RuntimeException. Log
 * statements never JSON-serialize JDBC objects or the data source (the cloud model carries the
 * decrypted password, and ResultSet/Statement/Connection are not JSON beans).
 */
@Slf4j
public class JdbcClient {

  /**
   * Opens a JDBC connection with the given driver class. Credentials are omitted when either the
   * user or the password is blank.
   *
   * @param store connection settings (URL, user, clear-text password)
   * @param driver fully qualified JDBC driver class name
   * @return an open connection; the caller must close it
   */
  public static Connection getClient(CloudDataSource store, String driver) {
    try {
      Class.forName(driver);
      Connection connection;
      if (StringUtils.isBlank(store.getDbUser()) || StringUtils.isBlank(store.getDbPassword())) {
        connection = DriverManager.getConnection(store.getDbUrl());
      } else {
        connection =
            DriverManager.getConnection(store.getDbUrl(), store.getDbUser(), store.getDbPassword());
      }
      log.info("Success getConnection with url " + store.getDbUrl());
      return connection;
    } catch (ClassNotFoundException e) {
      // Log only the URL: serializing the store would leak the decrypted password.
      log.warn("getConnection ClassNotFoundException, url=" + store.getDbUrl(), e);
      throw new RuntimeException(e);
    } catch (Exception e) {
      log.warn("getConnection Exception, url=" + store.getDbUrl(), e);
      throw new RuntimeException(e);
    }
  }

  /** Closes the result set when it is non-null and still open. */
  public static void closeResultSet(ResultSet res) {
    try {
      if (res == null || res.isClosed()) {
        return;
      }
      res.close();
    } catch (SQLException e) {
      log.warn("close ResultSet Exception", e);
      throw new RuntimeException(e);
    }
  }

  /** Closes the statement when it is non-null and still open. */
  public static void closeStatement(Statement stmt) {
    try {
      if (stmt == null || stmt.isClosed()) {
        return;
      }
      stmt.close();
    } catch (SQLException e) {
      log.warn("close Statement Exception", e);
      throw new RuntimeException(e);
    }
  }

  /** Closes the connection when it is non-null and still open. */
  public static void closeConnection(Connection conn) {
    try {
      if (conn == null || conn.isClosed()) {
        return;
      }
      conn.close();
    } catch (SQLException e) {
      log.warn("close Connection Exception", e);
      throw new RuntimeException(e);
    }
  }
}
+ */ +package com.antgroup.openspg.server.common.service.datasource.meta.client.impl; + +import com.alibaba.fastjson.JSON; +import com.antgroup.openspg.common.util.PartitionUtils; +import com.antgroup.openspg.server.common.model.datasource.Column; +import com.antgroup.openspg.server.common.service.datasource.meta.client.CloudDataSource; +import com.antgroup.openspg.server.common.service.datasource.meta.client.DataSourceMetaClient; +import com.google.common.collect.Lists; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; + +@Slf4j +public class JdbcMetaClientImpl implements DataSourceMetaClient { + + private static final String NUM_ROWS = "numRows "; + + private String driver; + + public JdbcMetaClientImpl(String driver) { + this.driver = driver; + } + + @Override + public List describeTable(CloudDataSource dataSource, String database, String tableName) { + + List columns = new ArrayList<>(); + Connection conn = null; + Statement stmt = null; + ResultSet res = null; + try { + conn = JdbcClient.getClient(dataSource, driver); + stmt = conn.createStatement(); + if (StringUtils.isNotBlank(database)) { + stmt.execute("use " + database); + } + res = stmt.executeQuery("describe " + tableName); + while (res.next()) { + String name = res.getString(1); + String type = res.getString(2); + String comment = res.getString(3); + if (StringUtils.isBlank(name) || StringUtils.isBlank(type) || name.startsWith("#")) { + continue; + } + columns.add(new Column(name, type, comment)); + } + } catch (Exception e) { + log.warn( + String.format( + "jdbc describeTable Exception: %s %s %s", + JSON.toJSONString(dataSource), database, tableName), + e); + throw 
new RuntimeException("jdbc describeTable Exception", e); + } finally { + JdbcClient.closeResultSet(res); + JdbcClient.closeStatement(stmt); + JdbcClient.closeConnection(conn); + } + return columns; + } + + @Override + public List showDatabases(CloudDataSource dataSource) { + List dbs = new ArrayList<>(); + Connection conn = null; + Statement stmt = null; + ResultSet res = null; + try { + conn = JdbcClient.getClient(dataSource, driver); + stmt = conn.createStatement(); + res = stmt.executeQuery("show databases"); + while (res.next()) { + String database = res.getString(1); + dbs.add(database); + } + } catch (Exception e) { + log.warn(String.format("jdbc showDatabases Exception: %s", JSON.toJSONString(dataSource)), e); + throw new RuntimeException("jdbc showDatabases Exception", e); + } finally { + JdbcClient.closeResultSet(res); + JdbcClient.closeStatement(stmt); + JdbcClient.closeConnection(conn); + } + return dbs; + } + + @Override + public List showTables(CloudDataSource dataSource, String database, String keyword) { + List tables = new ArrayList<>(); + Connection conn = null; + Statement stmt = null; + ResultSet res = null; + try { + conn = JdbcClient.getClient(dataSource, driver); + stmt = conn.createStatement(); + stmt.execute("use " + database); + res = stmt.executeQuery("show tables"); + while (res.next()) { + String name = res.getString(1); + // 如果keyword为空直接返回 + if (StringUtils.isBlank(keyword)) { + tables.add(name); + continue; + } + if (name.contains(keyword)) { + tables.add(name); + } + } + } catch (Exception e) { + log.warn( + String.format( + "jdbc showTables Exception: %s %s", JSON.toJSONString(dataSource), database), + e); + throw new RuntimeException("jdbc showTables Exception", e); + } finally { + JdbcClient.closeResultSet(res); + JdbcClient.closeStatement(stmt); + JdbcClient.closeConnection(conn); + } + return tables; + } + + @Override + public Boolean isPartitionTable(CloudDataSource dataSource, String database, String tableName) { + Connection 
conn = null; + Statement stmt = null; + ResultSet res = null; + try { + conn = JdbcClient.getClient(dataSource, driver); + stmt = conn.createStatement(); + if (StringUtils.isNotBlank(database)) { + stmt.execute("use " + database); + } + res = stmt.executeQuery("SHOW CREATE TABLE " + tableName); + if (res.next()) { + String createTableStatement = res.getString(1); + return createTableStatement.contains("PARTITIONED BY"); + } + } catch (Exception e) { + log.warn( + String.format( + "jdbc isPartitionTable Exception: %s %s %s", + JSON.toJSONString(dataSource), database, tableName), + e); + throw new RuntimeException("jdbc isPartitionTable Exception", e); + } finally { + JdbcClient.closeResultSet(res); + JdbcClient.closeStatement(stmt); + JdbcClient.closeConnection(conn); + } + return false; + } + + @Override + public Boolean testConnect(CloudDataSource dataSource) { + Connection conn = null; + Statement stmt = null; + try { + conn = JdbcClient.getClient(dataSource, driver); + stmt = conn.createStatement(); + stmt.executeQuery("show databases"); + } catch (Exception e) { + log.warn(String.format("testConnect Exception: %s", JSON.toJSONString(dataSource)), e); + throw new RuntimeException("testConnect Exception", e); + } finally { + JdbcClient.closeStatement(stmt); + JdbcClient.closeConnection(conn); + } + return Boolean.TRUE; + } + + @Override + public List> sampleDateForPartition( + CloudDataSource dataSource, + String dataSourceId, + String partitionStr, + String bizDate, + Integer limit) { + List> data = Lists.newArrayList(); + Connection conn = null; + try { + conn = JdbcClient.getClient(dataSource, driver); + List partitions = PartitionUtils.analysisPartition(partitionStr, bizDate, " and "); + for (String partition : partitions) { + String sql = "select * from " + dataSourceId + " where " + partition + " limit " + limit; + List> resList = executeQuery(conn, null, sql); + if (CollectionUtils.isNotEmpty(resList)) { + data.addAll(resList); + } + } + } catch (Exception 
e) { + log.warn( + String.format( + "jdbc sampleDateForPartition Exception: %s %s %s %s %s", + JSON.toJSONString(dataSource), dataSourceId, partitionStr, bizDate, limit), + e); + throw new RuntimeException("jdbc sampleDateForPartition Exception", e); + } finally { + JdbcClient.closeConnection(conn); + } + return data; + } + + @Override + public Boolean hasPartition( + CloudDataSource dataSource, String dataSourceId, String partitionStr, String bizDate) { + Connection conn = null; + try { + conn = JdbcClient.getClient(dataSource, driver); + List partitions = PartitionUtils.analysisPartition(partitionStr, bizDate); + for (String partition : partitions) { + Boolean hasPartitionSpec = hasPartition(conn, dataSourceId, partition); + if (hasPartitionSpec) { + return Boolean.TRUE; + } + } + return Boolean.FALSE; + } catch (Exception e) { + log.warn( + String.format( + "jdbc hasPartition Exception: %s %s %s %s", + JSON.toJSONString(dataSource), dataSourceId, partitionStr, bizDate), + e); + throw new RuntimeException("jdbc hasPartition Exception", e); + } finally { + JdbcClient.closeConnection(conn); + } + } + + @Override + public Long getRecordCount( + CloudDataSource dataSource, String dataSourceId, String partitionStr, String bizDate) { + + Long count = 0L; + Connection conn = null; + Statement stmt = null; + try { + conn = JdbcClient.getClient(dataSource, driver); + stmt = conn.createStatement(); + List partitions = PartitionUtils.analysisPartition(partitionStr, bizDate); + for (String partition : partitions) { + partition = partition.replaceAll("/", ","); + String sql = "desc formatted " + dataSourceId + " partition(" + partition + ")"; + Long num = getCount(stmt, sql); + if (num <= 0) { + String whereSql = getParameterSql(partition); + sql = "SELECT COUNT(*) FROM " + dataSourceId + " where " + whereSql; + num = getCountBySql(stmt, sql); + } + count = count + num; + } + } catch (Exception e) { + log.warn( + String.format( + "jdbc getRecordCount Exception: %s %s %s %s", 
+ JSON.toJSONString(dataSource), dataSourceId, partitionStr, bizDate), + e); + throw new RuntimeException("jdbc getRecordCount Exception", e); + } finally { + JdbcClient.closeStatement(stmt); + JdbcClient.closeConnection(conn); + } + return count; + } + + public static String getParameterSql(String partition) { + return partition.replaceAll("'", "\"").replaceAll(",", " and ").replaceAll("/", " and "); + } + + @Override + public List getAllPartitions(CloudDataSource dataSource, String dataSourceId) { + List partitions = new ArrayList<>(); + Connection conn = null; + Statement stmt = null; + ResultSet res = null; + try { + conn = JdbcClient.getClient(dataSource, driver); + stmt = conn.createStatement(); + res = stmt.executeQuery("show partitions " + dataSourceId); + while (res.next()) { + String partition = res.getString(1); + partitions.add(partition); + } + } catch (Exception e) { + log.warn( + String.format( + "jdbc getAllPartitions Exception: %s %s", + JSON.toJSONString(dataSource), dataSourceId), + e); + throw new RuntimeException("jdbc getAllPartitions Exception", e); + } finally { + JdbcClient.closeResultSet(res); + JdbcClient.closeStatement(stmt); + JdbcClient.closeConnection(conn); + } + return partitions; + } + + public static List> executeQuery( + Connection conn, String database, String sql) { + long start = System.currentTimeMillis(); + log.info(String.format("[jdbc executeQuery start] database:%s sql:%s", database, sql)); + List> resList = new ArrayList<>(); + Statement stmt = null; + ResultSet res = null; + try { + stmt = conn.createStatement(); + if (StringUtils.isNotBlank(database)) { + stmt.execute("use " + database); + } + res = stmt.executeQuery(sql); + ResultSetMetaData rsmd = res.getMetaData(); + int count = rsmd.getColumnCount(); + while (res.next()) { + Map row = new HashMap<>(); + for (int i = 1; i <= count; i++) { + row.put(rsmd.getColumnName(i), res.getObject(i)); + } + resList.add(row); + } + long cost = System.currentTimeMillis() - start; 
+ log.info( + String.format("[jdbc executeQuery] database:%s sql:%s cost:%s", database, sql, cost)); + } catch (Exception e) { + log.error( + String.format("[jdbc executeQuery Exception] database:%s sql:%s", database, sql), e); + throw new RuntimeException(e); + } finally { + JdbcClient.closeResultSet(res); + JdbcClient.closeStatement(stmt); + } + return resList; + } + + /** + * 判定是否有分区信息 + * + * @param conn + * @param conn + * @param partitionStr + * @return + */ + private static Boolean hasPartition(Connection conn, String dataSourceId, String partitionStr) { + String sql = "show partitions " + dataSourceId + " partition(" + partitionStr + ")"; + List> resList = executeQuery(conn, null, sql); + return resList.size() > 0; + } + + private static Long getCount(Statement stmt, String sql) { + Long count = 0L; + ResultSet res = null; + try { + res = stmt.executeQuery(sql); + while (res.next()) { + String key = res.getString(2); + String value = res.getString(3); + if (NUM_ROWS.equalsIgnoreCase(key)) { + String numRows = value.trim(); + count = count + Long.valueOf(numRows); + } + } + } catch (Exception e) { + log.error(String.format("[jdbc getCount Exception] sql:%s", sql), e); + } finally { + JdbcClient.closeResultSet(res); + } + return count; + } + + private static Long getCountBySql(Statement stmt, String sql) throws SQLException { + Long count = 0L; + ResultSet res = stmt.executeQuery(sql); + if (res.next()) { + count = res.getLong(1); + } + JdbcClient.closeResultSet(res); + return count; + } +} diff --git a/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/OdpsClient.java b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/OdpsClient.java new file mode 100644 index 000000000..ac30895e1 --- /dev/null +++ b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/OdpsClient.java @@ -0,0 +1,235 @@ +/* + * 
Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.common.service.datasource.meta.client.impl; + +import com.alibaba.fastjson.JSONObject; +import com.aliyun.odps.Column; +import com.aliyun.odps.Odps; +import com.aliyun.odps.PartitionSpec; +import com.aliyun.odps.Table; +import com.aliyun.odps.TableSchema; +import com.aliyun.odps.account.Account; +import com.aliyun.odps.account.AliyunAccount; +import com.aliyun.odps.data.Record; +import com.aliyun.odps.tunnel.TableTunnel; +import com.antgroup.openspg.common.util.RetryerUtil; +import com.antgroup.openspg.common.util.constants.CommonConstant; +import com.antgroup.openspg.server.common.service.datasource.meta.client.CloudDataSource; +import com.github.rholder.retry.Retryer; +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; +import com.google.common.collect.Maps; +import java.io.Serializable; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; +import org.springframework.util.Assert; + +@Slf4j +public class OdpsClient { + + private static Retryer retry = RetryerUtil.getRetryer(50L, 2L, 5); + + /** downloadSession cache */ + private static final Cache downloadSession_CACHE = + CacheBuilder.newBuilder().maximumSize(5000).expireAfterWrite(4, TimeUnit.MINUTES).build(); + + /** + * 获取odps客户端 + * + * @param store + * @param 
defaultProject + * @return + */ + public static Odps getClient(CloudDataSource store, String defaultProject) { + Account account = new AliyunAccount(store.getDbUser(), store.getDbPassword()); + Odps odps = new Odps(account); + odps.setEndpoint(store.getDbUrl()); + if (defaultProject != null) { + odps.setDefaultProject(defaultProject); + } + return odps; + } + + /** + * ge table tunnel + * + * @param odps + * @param store + * @return + */ + public static TableTunnel createTableTunnel(Odps odps, CloudDataSource store) { + TableTunnel tunnel = new TableTunnel(odps); + JSONObject property = store.getConnectionInfo(); + if (property.containsKey(CommonConstant.TUNNEL_ENDPOINT)) { + tunnel.setEndpoint(property.getString(CommonConstant.TUNNEL_ENDPOINT)); + } + return tunnel; + } + + /** + * create download session + * + * @param tunnel + * @param project + * @param table + * @param partitionSpec + * @return + */ + public static TableTunnel.DownloadSession createDownloadSession( + TableTunnel tunnel, String project, String table, PartitionSpec partitionSpec) { + Assert.notNull(partitionSpec, "createDownloadSession partitionSpec not null"); + String cacheKey = project + "." 
+ table + ":" + partitionSpec; + try { + return downloadSession_CACHE.get( + cacheKey, + () -> { + long start = System.currentTimeMillis(); + TableTunnel.DownloadSession downloadSession = + retryableCreateDownLoadSession(tunnel, project, table, partitionSpec); + long cost = System.currentTimeMillis() - start; + log.info("createDownloadSession {} end:{}", cacheKey, cost); + return downloadSession; + }); + } catch (Exception e) { + log.warn("createDownloadSession Exception:" + cacheKey, e); + throw new RuntimeException("createDownloadSession Exception", e); + } + } + + /** + * 获取指定表 + * + * @param odps + * @param project + * @param table + * @return + */ + public static Table getTable(Odps odps, String project, String table) { + try { + long start = System.currentTimeMillis(); + Table t = odps.tables().get(project, table); + t.reload(); + long cost = System.currentTimeMillis() - start; + log.info("get table end:", project, table, cost); + return t; + } catch (Exception e) { + log.warn(String.format("get table %s %s Exception:", project, table), e); + throw new RuntimeException("get table Exception", e); + } + } + + /** + * 重试机制获取获取DownloadSession + * + * @param tunnel + * @return + */ + private static TableTunnel.DownloadSession retryableCreateDownLoadSession( + TableTunnel tunnel, String project, String table, PartitionSpec partitionSpec) + throws Exception { + AtomicReference downloadSession = new AtomicReference<>(); + retry.call( + () -> { + try { + if (partitionSpec == null) { + downloadSession.set(tunnel.createDownloadSession(project, table)); + } else { + downloadSession.set(tunnel.createDownloadSession(project, table, partitionSpec)); + } + return true; + } catch (Throwable e) { + log.warn( + String.format("retry create DownLoadSession %s %s Exception:", project, table), e); + throw new RuntimeException("retry create DownLoadSession Exception", e); + } + }); + return downloadSession.get(); + } + + /** + * 根据完整的名称获取项目和表名 + * + * @param + * @return + * @author 
庄舟 + * @date 2021/2/22 17:44 + */ + public static String[] getProjectAndTable(String sourceId) { + sourceId = sourceId.replaceFirst("odps\\.", ""); + String[] split = StringUtils.split(sourceId, "."); + Assert.isTrue(split.length == 2, "数据源格式必须为projectName.tableName,当前格式为:" + sourceId); + return split; + } + + /** + * 解析record数据成map + * + * @param record + * @param schema + * @return + */ + public static Map consumeRecord(Record record, TableSchema schema) { + Map map = Maps.newHashMap(); + List allColumn = schema.getColumns(); + allColumn.addAll(schema.getPartitionColumns()); + for (int i = 0; i < allColumn.size(); i++) { + Column column = allColumn.get(i); + String colValue; + switch (column.getTypeInfo().getOdpsType()) { + case BIGINT: + { + Long v = record.getBigint(i); + colValue = getString(v); + break; + } + case BOOLEAN: + { + Boolean v = record.getBoolean(i); + colValue = getString(v); + break; + } + case DATETIME: + { + Date v = record.getDatetime(i); + colValue = getString(v); + break; + } + case DOUBLE: + { + Double v = record.getDouble(i); + colValue = getString(v); + break; + } + case STRING: + { + String v = record.getString(i); + colValue = v == null ? null : v; + break; + } + default: + throw new RuntimeException("Unknown column type: " + column.getTypeInfo().getOdpsType()); + } + map.put(column.getName(), colValue); + } + return map; + } + + private static String getString(Serializable v) { + return v == null ? 
null : v.toString(); + } +} diff --git a/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/OdpsMetaClientImpl.java b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/OdpsMetaClientImpl.java new file mode 100644 index 000000000..75cea0cf3 --- /dev/null +++ b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/datasource/meta/client/impl/OdpsMetaClientImpl.java @@ -0,0 +1,341 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.common.service.datasource.meta.client.impl; + +import com.alibaba.fastjson.JSON; +import com.aliyun.odps.Instance; +import com.aliyun.odps.Odps; +import com.aliyun.odps.OdpsException; +import com.aliyun.odps.Partition; +import com.aliyun.odps.PartitionSpec; +import com.aliyun.odps.Project; +import com.aliyun.odps.ProjectFilter; +import com.aliyun.odps.Projects; +import com.aliyun.odps.Table; +import com.aliyun.odps.TableSchema; +import com.aliyun.odps.data.Record; +import com.aliyun.odps.data.RecordReader; +import com.aliyun.odps.task.SQLTask; +import com.aliyun.odps.tunnel.TableTunnel; +import com.antgroup.openspg.common.util.PartitionUtils; +import com.antgroup.openspg.server.common.model.datasource.Column; +import com.antgroup.openspg.server.common.service.datasource.meta.client.CloudDataSource; +import com.antgroup.openspg.server.common.service.datasource.meta.client.DataSourceMetaClient; +import com.csvreader.CsvReader; +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; +import com.google.common.collect.Lists; +import java.io.IOException; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.collections4.CollectionUtils; +import org.springframework.util.Assert; + +@Slf4j +public class OdpsMetaClientImpl implements DataSourceMetaClient { + + /** AllPartition Cache */ + private static final Cache> ALL_PARTITION_CACHE = + CacheBuilder.newBuilder().maximumSize(5000).expireAfterWrite(4, TimeUnit.MINUTES).build(); + + /** AllDatabases Cache */ + private static final Cache> ALL_DATABASES_CACHE = + CacheBuilder.newBuilder().maximumSize(5000).expireAfterWrite(4, TimeUnit.HOURS).build(); + + @Override + public List describeTable(CloudDataSource dataSource, String database, String tableName) { + try { + Odps odps = 
OdpsClient.getClient(dataSource, database); + + Table table = odps.tables().get(database, tableName); + if (null == table) { + return null; + } + List columns = Lists.newArrayList(); + TableSchema tableSchema = table.getSchema(); + columns.addAll(tableSchema.getColumns()); + columns.addAll(tableSchema.getPartitionColumns()); + + List columnList = new ArrayList<>(columns.size()); + for (com.aliyun.odps.Column column : columns) { + Column columnInfo = + new Column( + column.getName(), column.getTypeInfo().getOdpsType().name(), column.getComment()); + columnList.add(columnInfo); + } + return columnList; + } catch (Exception e) { + log.warn( + String.format( + "odps describeTable %s %s %s Exception:", + JSON.toJSONString(dataSource), database, tableName), + e); + throw new RuntimeException("odps describeTable Exception", e); + } + } + + @Override + public List showDatabases(CloudDataSource dataSource) { + try { + return ALL_DATABASES_CACHE.get( + dataSource.getDbUrl(), + () -> { + Odps odps = OdpsClient.getClient(dataSource, null); + Projects projects = odps.projects(); + List projectList = Lists.newArrayList(); + Iterator iterator = projects.iteratorByFilter(new ProjectFilter()); + while (iterator.hasNext()) { + Project project = iterator.next(); + projectList.add(project.getName()); + } + return projectList; + }); + } catch (Exception e) { + log.warn(String.format("odps showDatabases %s Exception:", JSON.toJSONString(dataSource)), e); + throw new RuntimeException("odps showDatabases Exception", e); + } + } + + @Override + public List showTables(CloudDataSource dataSource, String database, String keyword) { + try { + Odps odps = OdpsClient.getClient(dataSource, database); + Assert.isTrue(odps.projects().exists(database), "database does not exist!"); + Instance i = SQLTask.run(odps, "SHOW TABLES LIKE '*" + keyword + "*';"); + i.waitForSuccess(); + return parse(i); + } catch (Exception e) { + log.warn(String.format("odps showTables %s Exception:", 
JSON.toJSONString(dataSource)), e); + throw new RuntimeException("odps showTables Exception", e); + } + } + + @Override + public Boolean isPartitionTable(CloudDataSource dataSource, String database, String tableName) { + try { + Odps odps = OdpsClient.getClient(dataSource, database); + Table table = odps.tables().get(database, tableName); + if (null == table) { + return false; + } + return table.isPartitioned(); + } catch (Exception e) { + log.warn( + String.format("odps isPartitionTable %s Exception:", JSON.toJSONString(dataSource)), e); + throw new RuntimeException("odps isPartitionTable Exception", e); + } + } + + public static List parse(Instance instance) throws OdpsException { + String selectResult = instance.getTaskResults().get("AnonymousSQLTask"); + CsvReader reader = new CsvReader(new StringReader(selectResult)); + reader.setSafetySwitch(false); + List records = new ArrayList(); + + try { + for (; reader.readRecord(); ) { + String[] newline = reader.getValues(); + for (int i = 0; i < newline.length; ++i) { + String value = newline[i]; + if (value.contains(":")) { + value = value.split(":")[1]; + } + records.add(value); + } + } + } catch (IOException e) { + throw new OdpsException("Error when parse sql results.", e); + } finally { + reader.close(); + } + return records; + } + + @Override + public Boolean testConnect(CloudDataSource dataSource) { + try { + Odps odps = OdpsClient.getClient(dataSource, null); + Projects projects = odps.projects(); + Iterator iterator = projects.iteratorByFilter(new ProjectFilter()); + while (iterator.hasNext()) { + iterator.next(); + return Boolean.TRUE; + } + return Boolean.TRUE; + } catch (Exception e) { + log.warn(String.format("odps testConnect %s Exception:", JSON.toJSONString(dataSource)), e); + throw new RuntimeException("odps testConnect Exception", e); + } + } + + @Override + public List> sampleDateForPartition( + CloudDataSource dataSource, + String dataSourceId, + String partitionStr, + String bizDate, + Integer 
limit) { + List> data = Lists.newArrayList(); + + try { + String project = OdpsClient.getProjectAndTable(dataSourceId)[0]; + String table = OdpsClient.getProjectAndTable(dataSourceId)[1]; + Odps odps = OdpsClient.getClient(dataSource, project); + TableTunnel tableTunnel = OdpsClient.createTableTunnel(odps, dataSource); + List partitions = PartitionUtils.analysisPartition(partitionStr, bizDate); + for (String partition : partitions) { + PartitionSpec spec = new PartitionSpec(partition); + TableTunnel.DownloadSession downloadSession = + OdpsClient.createDownloadSession(tableTunnel, project, table, spec); + Table t = OdpsClient.getTable(odps, project, table); + RecordReader recordReader = t.read(spec, null, limit); + Record record; + while ((record = recordReader.read()) != null) { + data.add(OdpsClient.consumeRecord(record, downloadSession.getSchema())); + } + } + } catch (Exception e) { + log.warn( + String.format( + "odps sampleDateForPartition %s %s %s %s %s Exception:", + JSON.toJSONString(dataSource), dataSourceId, partitionStr, bizDate, limit), + e); + throw new RuntimeException("odps sampleDateForPartition Exception", e); + } + + return data; + } + + @Override + public Boolean hasPartition( + CloudDataSource dataSource, String dataSourceId, String partitionStr, String bizDate) { + try { + String project = OdpsClient.getProjectAndTable(dataSourceId)[0]; + String table = OdpsClient.getProjectAndTable(dataSourceId)[1]; + Odps odps = OdpsClient.getClient(dataSource, project); + Table t = OdpsClient.getTable(odps, project, table); + Assert.isTrue(t.isPartitioned(), "The table is not a partitioned table!"); + + List partitions = PartitionUtils.analysisPartition(partitionStr, bizDate); + for (String partition : partitions) { + PartitionSpec partitionSpec = new PartitionSpec(partition); + Boolean hasPartitionSpec = t.hasPartition(partitionSpec); + if (hasPartitionSpec) { + return Boolean.TRUE; + } + } + return Boolean.FALSE; + } catch (Exception e) { + log.warn( + 
String.format( + "odps hasPartition %s %s %s %s Exception:", + JSON.toJSONString(dataSource), dataSourceId, partitionStr, bizDate), + e); + throw new RuntimeException("hasPartition Exception", e); + } + } + + @Override + public Long getRecordCount( + CloudDataSource dataSource, String dataSourceId, String partitionStr, String bizDate) { + Long count = 0L; + try { + String project = OdpsClient.getProjectAndTable(dataSourceId)[0]; + String table = OdpsClient.getProjectAndTable(dataSourceId)[1]; + Odps odps = OdpsClient.getClient(dataSource, project); + TableTunnel tableTunnel = OdpsClient.createTableTunnel(odps, dataSource); + + List partitions = PartitionUtils.analysisPartition(partitionStr, bizDate); + for (String partition : partitions) { + PartitionSpec partitionSpec = new PartitionSpec(partition); + TableTunnel.DownloadSession downloadSession = + OdpsClient.createDownloadSession(tableTunnel, project, table, partitionSpec); + long num = downloadSession.getRecordCount(); + count = count + num; + } + } catch (Exception e) { + log.warn( + String.format( + "getRecordCount %s %s %s %s Exception:", + JSON.toJSONString(dataSource), dataSourceId, partitionStr, bizDate), + e); + throw new RuntimeException("getRecordCount Exception", e); + } + return count; + } + + @Override + public List getAllPartitions(CloudDataSource dataSource, String dataSourceId) { + String project = OdpsClient.getProjectAndTable(dataSourceId)[0]; + String table = OdpsClient.getProjectAndTable(dataSourceId)[1]; + try { + Odps odps = OdpsClient.getClient(dataSource, project); + return ALL_PARTITION_CACHE.get( + dataSourceId, () -> getPartitionsNoCache(odps, project, table)); + } catch (Exception e) { + log.warn( + String.format( + "odps getAllPartitions %s %s Exception:", + JSON.toJSONString(dataSource), dataSourceId), + e); + throw new RuntimeException("odps getAllPartitions Exception", e); + } + } + + /** + * get odps Partitions No Cache + * + * @param odps + * @param project + * @param table + * 
@return + * @throws OdpsException + */ + public static List getPartitionsNoCache(Odps odps, String project, String table) + throws OdpsException { + Table t = getTable(odps, project, table); + List partitions = Lists.newArrayList(); + long start = System.currentTimeMillis(); + List allPartitions = t.getPartitions(); + if (CollectionUtils.isEmpty(allPartitions)) { + return partitions; + } + long cost = System.currentTimeMillis() - start; + log.info("get all partitions: {} {} {}", project, table, cost); + allPartitions.forEach(el -> partitions.add(el.getPartitionSpec().toString(false, true))); + return partitions; + } + + /** + * get odps Table + * + * @param odps + * @param project + * @param table + * @return + * @throws OdpsException + */ + public static Table getTable(Odps odps, String project, String table) throws OdpsException { + long start = System.currentTimeMillis(); + Table t = odps.tables().get(project, table); + t.reload(); + long cost = System.currentTimeMillis() - start; + log.info("get odps table: {} {} {}", project, table, cost); + return t; + } +} diff --git a/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/permission/PermissionRepository.java b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/permission/PermissionRepository.java new file mode 100644 index 000000000..ea9e6d0b5 --- /dev/null +++ b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/permission/PermissionRepository.java @@ -0,0 +1,133 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.common.service.permission; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.permission.Permission; +import java.util.List; + +/** permission repository, contain PROJECT、SCHEMA、INDEX */ +public interface PermissionRepository { + + /** + * create permission + * + * @param permission + * @return + */ + Integer save(Permission permission); + + /** + * update permission + * + * @param permission + * @return + */ + Integer update(Permission permission); + + /** + * query all permission by resourceId + * + * @param resourceId + * @param resourceTag + * @param page + * @param pageSize + * @return + */ + List query(Long resourceId, String resourceTag, Integer page, Integer pageSize); + + /** + * query by roleId nad the part of userNo + * + * @param userNo + * @param roleId + * @param resourceId + * @param resourceTag + * @param page + * @param pageSize + * @return + */ + List queryByUserNoAndRoleId( + String userNo, + Long roleId, + Long resourceId, + String resourceTag, + Integer page, + Integer pageSize); + + /** + * query page + * + * @param userNo + * @param roleId + * @param resourceId + * @param resourceTag + * @param page + * @param pageSize + * @return + */ + Paged queryPage( + String userNo, + Long roleId, + Long resourceId, + String resourceTag, + Integer page, + Integer pageSize); + + /** + * delete permission + * + * @param permission + * @return + */ + Integer delete(Permission permission); + + /** + * get by resourceIds and resourceTag + * + * @param resourceIds + * @param resourceTag + * @return + */ + List selectByResourceIdsAndResourceTag(List 
resourceIds, String resourceTag); + + /** + * get by userRoles and resourceIds + * + * @param resourceIds + * @param userNo + * @param roleType + * @param resourceTag + * @return + */ + List getPermissionByUserRolesAndId( + List resourceIds, String userNo, String roleType, String resourceTag); + + /** + * get by userNo and resourceTag + * + * @param userNo + * @param resourceTag + * @return + */ + List getPermissionByUserNoAndResourceTag(String userNo, String resourceTag); + + /** + * get by id + * + * @param id + * @return + */ + Permission selectByPrimaryKey(Long id); +} diff --git a/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/project/ProjectRepository.java b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/project/ProjectRepository.java index a1dc1147c..f88daf55c 100644 --- a/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/project/ProjectRepository.java +++ b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/project/ProjectRepository.java @@ -31,4 +31,6 @@ public interface ProjectRepository { Integer deleteById(Long projectId); Paged queryPaged(ProjectQueryRequest request, int start, int size); + + Project queryByNamespace(String namespace); } diff --git a/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/spring/SpringContextHolder.java b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/spring/SpringContextHolder.java index 0cdf365b6..c0d62e495 100644 --- a/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/spring/SpringContextHolder.java +++ b/server/common/service/src/main/java/com/antgroup/openspg/server/common/service/spring/SpringContextHolder.java @@ -15,6 +15,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Map; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import 
org.springframework.context.ApplicationContextAware; @@ -55,4 +56,11 @@ public static List getBeans(Class clazz) { } return null; } + + public static Map getBeanMap(Class clazz) { + if (applicationContext != null) { + return applicationContext.getBeansOfType(clazz); + } + return null; + } } diff --git a/server/core/reasoner/service/src/main/java/com/antgroup/openspg/server/core/reasoner/service/impl/Utils.java b/server/core/reasoner/service/src/main/java/com/antgroup/openspg/server/core/reasoner/service/impl/Utils.java index 954d4a243..948080141 100644 --- a/server/core/reasoner/service/src/main/java/com/antgroup/openspg/server/core/reasoner/service/impl/Utils.java +++ b/server/core/reasoner/service/src/main/java/com/antgroup/openspg/server/core/reasoner/service/impl/Utils.java @@ -34,13 +34,13 @@ public class Utils { public static List getAllRdfEntity( - GraphState graphState, IVertexId id) { + GraphState graphState, IVertexId id, String rdfType) { List result = new ArrayList<>(); // find vertex prop IVertex vertex = graphState.getVertex(id, null, null); - if (null != vertex && null != vertex.getValue()) { + if (null != vertex && null != vertex.getValue() && !"relation".equals(rdfType)) { // 提取属性 log.info("vertex_property,{}", vertex); for (String propertyName : vertex.getValue().getKeySet()) { @@ -56,6 +56,7 @@ public static List getAllRdfEntity( "name", String.valueOf(pValue))); graphState.addVertex(propVertex); LinkedUdtfResult udtfRes = new LinkedUdtfResult(); + udtfRes.getDirection().add(Direction.OUT.name()); udtfRes.setEdgeType(propertyName); udtfRes.getTargetVertexIdList().add(String.valueOf(pValue)); if (pValue instanceof Integer) { @@ -76,6 +77,14 @@ public static List getAllRdfEntity( if (CollectionUtils.isNotEmpty(edgeList)) { for (IEdge edge : edgeList) { Object toIdObj = edge.getValue().get(Constants.EDGE_TO_ID_KEY); + String dir = Direction.OUT.name(); + Object nodeIdObj = vertex.getValue().get(Constants.NODE_ID_KEY); + String targetType = 
edge.getTargetId().getType(); + if (nodeIdObj.equals(toIdObj)) { + toIdObj = edge.getValue().get(Constants.EDGE_FROM_ID_KEY); + dir = Direction.IN.name(); + targetType = String.valueOf(edge.getValue().get(Constants.EDGE_FROM_ID_TYPE_KEY)); + } if (null == toIdObj) { continue; } @@ -84,7 +93,8 @@ public static List getAllRdfEntity( LinkedUdtfResult udtfRes = new LinkedUdtfResult(); udtfRes.setEdgeType(spo.getP()); udtfRes.getTargetVertexIdList().add(String.valueOf(toIdObj)); - udtfRes.getTargetVertexTypeList().add(edge.getTargetId().getType()); + udtfRes.getTargetVertexTypeList().add(targetType); + udtfRes.getDirection().add(dir); for (String propKey : edge.getValue().getKeySet()) { if (propKey.startsWith("_")) { continue; diff --git a/server/core/reasoner/service/src/main/java/com/antgroup/openspg/server/core/reasoner/service/udtf/RdfExpand.java b/server/core/reasoner/service/src/main/java/com/antgroup/openspg/server/core/reasoner/service/udtf/RdfExpand.java index d7d4b8e3c..413ed5647 100644 --- a/server/core/reasoner/service/src/main/java/com/antgroup/openspg/server/core/reasoner/service/udtf/RdfExpand.java +++ b/server/core/reasoner/service/src/main/java/com/antgroup/openspg/server/core/reasoner/service/udtf/RdfExpand.java @@ -68,6 +68,10 @@ public void process(List args) { String vertexType = null; String bizId = null; Object s = context.get(srcAlias); + String rdfType = null; + if (args.size() > 0) { + rdfType = (String) args.get(0); + } if (s instanceof Map) { Map sMap = (Map) s; bizId = (String) sMap.get(Constants.NODE_ID_KEY); @@ -75,7 +79,7 @@ public void process(List args) { } IVertexId id = new VertexBizId(bizId, vertexType); // 结果 - List validBizIds = Utils.getAllRdfEntity(graphState, id); + List validBizIds = Utils.getAllRdfEntity(graphState, id, rdfType); for (LinkedUdtfResult udtfResult : validBizIds) { forward(Lists.newArrayList(udtfResult)); } diff --git 
a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerInfoQuery.java b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerInfoQuery.java new file mode 100644 index 000000000..9fee2bb35 --- /dev/null +++ b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerInfoQuery.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.model.query; + +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfo; +import java.util.Date; +import java.util.List; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class SchedulerInfoQuery extends SchedulerInfo { + + private static final long serialVersionUID = -1357781188788789871L; + + private List ids; + + private Date startCreateTime; + + private Date endCreateTime; + + private Integer pageNo; + + private Integer pageSize; + + private String sort; + + private String order; +} diff --git a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerInstanceQuery.java b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerInstanceQuery.java new file mode 100644 index 000000000..ebaaad827 --- /dev/null +++ b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerInstanceQuery.java 
@@ -0,0 +1,40 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.model.query; + +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; +import java.util.Date; +import java.util.List; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class SchedulerInstanceQuery extends SchedulerInstance { + + private static final long serialVersionUID = -9052713039986838271L; + + private List ids; + + private Date startCreateTime; + + private Date endCreateTime; + + private Integer pageNo; + + private Integer pageSize; + + private String sort; + + private String order; +} diff --git a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerJobQuery.java b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerJobQuery.java new file mode 100644 index 000000000..6621d6fed --- /dev/null +++ b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerJobQuery.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.model.query; + +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; +import java.util.Date; +import java.util.List; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class SchedulerJobQuery extends SchedulerJob { + + private static final long serialVersionUID = -6208158596622111679L; + + private List ids; + + private Date startCreateTime; + + private Date endCreateTime; + + private Integer pageNo; + + private Integer pageSize; + + private String sort; + + private String order; +} diff --git a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerTaskQuery.java b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerTaskQuery.java new file mode 100644 index 000000000..2ccba898f --- /dev/null +++ b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/query/SchedulerTaskQuery.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.model.query; + +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; +import java.util.Date; +import java.util.List; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class SchedulerTaskQuery extends SchedulerTask { + + private static final long serialVersionUID = -6684463704224261930L; + + private List ids; + + private Date startCreateTime; + + private Date endCreateTime; + + private Integer pageNo; + + private Integer pageSize; + + private String sort; + + private String order; +} diff --git a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerHandlerResult.java b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerHandlerResult.java new file mode 100644 index 000000000..be5beb509 --- /dev/null +++ b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerHandlerResult.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.model.service; + +import com.antgroup.openspg.server.common.model.base.BaseModel; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TaskStatus; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class SchedulerHandlerResult extends BaseModel { + + private static final long serialVersionUID = -4839262175495859114L; + + public SchedulerHandlerResult() {} + + public SchedulerHandlerResult(TaskStatus status, String msg) { + this.status = status; + this.msg = msg; + } + + private TaskStatus status; + + private String msg; +} diff --git a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInfo.java b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInfo.java new file mode 100644 index 000000000..5d785ed62 --- /dev/null +++ b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInfo.java @@ -0,0 +1,78 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.model.service; + +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.server.common.model.base.BaseModel; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.SchedulerInfoStatus; +import com.google.common.collect.Lists; +import java.util.Date; +import java.util.List; +import lombok.Getter; +import lombok.Setter; + +/** Scheduler Info Model */ +@Getter +@Setter +public class SchedulerInfo extends BaseModel { + + private static final long serialVersionUID = 8374591289230111738L; + + public static final String WHITE_IP_KEY = "whiteIps"; + + public static final String HOST_EXCEPTION_TIMEOUT_KEY = "hostExceptionTimeout"; + + /** primary key */ + private Long id; + + /** Create time */ + private Date gmtCreate; + + /** Modified time */ + private Date gmtModified; + + /** name */ + private String name; + + /** status */ + private SchedulerInfoStatus status; + + /** Scheduler period Unit: second */ + private Long period; + + /** execute count */ + private Integer count; + + /** log */ + private List log; + + /** config */ + private JSONObject config; + + /** lock Time */ + private Date lockTime; + + public List getWhiteIps() { + if (config == null || !config.containsKey(WHITE_IP_KEY)) { + return Lists.newArrayList(); + } + return JSONObject.parseArray(config.getString(WHITE_IP_KEY), String.class); + } + + public Long getHostExceptionTimeout() { + if (config == null || !config.containsKey(HOST_EXCEPTION_TIMEOUT_KEY)) { + return 0L; + } + return config.getLong(HOST_EXCEPTION_TIMEOUT_KEY); + } +} diff --git a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInfoLog.java b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInfoLog.java new file mode 100644 index 000000000..5da347e59 --- /dev/null +++ 
b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInfoLog.java @@ -0,0 +1,52 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.model.service; + +import com.alibaba.fastjson.annotation.JSONField; +import com.antgroup.openspg.common.util.NetworkAddressUtils; +import com.antgroup.openspg.server.common.model.base.BaseModel; +import java.util.Date; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class SchedulerInfoLog extends BaseModel { + + private static final long serialVersionUID = -7380727904455645196L; + + public SchedulerInfoLog() {} + + public SchedulerInfoLog(String status, String elog, Date rt, Date ft) { + this.status = status; + this.elog = elog; + this.rt = rt; + this.ft = ft; + this.ip = NetworkAddressUtils.LOCAL_IP; + } + + private String status; + + /** Exception log */ + private String elog; + + /** trigger time */ + @JSONField(format = "yyyy-MM-dd HH:mm:ss") + private Date rt; + + /** finish time */ + @JSONField(format = "yyyy-MM-dd HH:mm:ss") + private Date ft; + + private String ip; +} diff --git a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInstance.java b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInstance.java index f4204a187..947d51cc6 100644 --- 
a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInstance.java +++ b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerInstance.java @@ -82,7 +82,4 @@ public class SchedulerInstance extends BaseModel { /** task dag Config */ private TaskExecuteDag taskDag; - - /** start CreateTime Date For Query */ - private transient Date startCreateTime; } diff --git a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerTask.java b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerTask.java index 317ed3b3e..e38f1128e 100644 --- a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerTask.java +++ b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/service/SchedulerTask.java @@ -13,7 +13,6 @@ package com.antgroup.openspg.server.core.scheduler.model.service; import com.alibaba.fastjson.JSONObject; -import com.antgroup.openspg.common.util.DateTimeUtils; import com.antgroup.openspg.server.common.model.base.BaseModel; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TaskStatus; import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteDag; @@ -47,6 +46,9 @@ public class SchedulerTask extends BaseModel { /** status */ private TaskStatus status; + /** project id */ + private Long projectId; + /** SchedulerJob Id */ private Long jobId; @@ -93,6 +95,7 @@ public SchedulerTask(SchedulerInstance instance, TaskStatus status, TaskExecuteD this.executeNum = 0; this.beginTime = new Date(); this.status = status; + this.projectId = instance.getProjectId(); this.jobId = instance.getJobId(); this.instanceId = instance.getId(); this.nodeId = node.getId(); @@ -102,11 +105,6 @@ public SchedulerTask(SchedulerInstance instance, TaskStatus status, 
TaskExecuteD if (node.getProperties() != null) { this.extension = node.getProperties(); } - - StringBuffer log = new StringBuffer(DateTimeUtils.getDate2LongStr(new Date())); - log.append("Create new Task, Waiting preceding node to complete....."); - log.append(System.getProperty("line.separator")); - - this.traceLog = log.toString(); + this.traceLog = System.getProperty("line.separator"); } } diff --git a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/task/TaskExecuteDag.java b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/task/TaskExecuteDag.java index a704f5b31..281c65def 100644 --- a/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/task/TaskExecuteDag.java +++ b/server/core/scheduler/model/src/main/java/com/antgroup/openspg/server/core/scheduler/model/task/TaskExecuteDag.java @@ -13,9 +13,14 @@ package com.antgroup.openspg.server.core.scheduler.model.task; import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.annotation.JSONField; import com.google.common.collect.Lists; +import java.util.ArrayList; import java.util.Collections; +import java.util.HashSet; import java.util.List; +import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import lombok.Getter; import lombok.Setter; @@ -49,6 +54,49 @@ public List getRelatedNodes(String id, boolean next) { .collect(Collectors.toList()); } + @JSONField(serialize = false) + public List getNodesByType(String nodeType) { + List nodes = Lists.newArrayList(); + if (nodeType == null) { + return nodes; + } + for (Node node : this.nodes) { + if (node == null) { + continue; + } + if (nodeType.equals(node.getTaskComponent())) { + nodes.add(node); + } + } + return nodes; + } + + public List getSuccessorNodes(String startNodeId) { + Set visited = new HashSet<>(); + List result = new ArrayList<>(); + dfs(startNodeId, visited, result); + return result; + } + + 
private void dfs(String nodeId, Set visited, List result) { + if (visited.contains(nodeId)) { + return; + } + visited.add(nodeId); + + List outgoingEdges = + edges.stream().filter(edge -> edge.getFrom().equals(nodeId)).collect(Collectors.toList()); + for (Edge edge : outgoingEdges) { + Optional targetNodeOptional = + nodes.stream().filter(node -> node.getId().equals(edge.getTo())).findFirst(); + if (targetNodeOptional.isPresent()) { + Node targetNode = targetNodeOptional.get(); + result.add(targetNode); + dfs(targetNode.getId(), visited, result); + } + } + } + @Getter @Setter @ToString diff --git a/server/core/scheduler/service/pom.xml b/server/core/scheduler/service/pom.xml index ab110bb5c..aa717406a 100644 --- a/server/core/scheduler/service/pom.xml +++ b/server/core/scheduler/service/pom.xml @@ -39,5 +39,29 @@ org.springframework.boot spring-boot-autoconfigure + + jakarta.annotation + jakarta.annotation-api + + + com.antgroup.openspg.cloudext + cloudext-interface-object-storage + + + com.antgroup.openspg.cloudext + cloudext-impl-objectstorage-minio + + + com.antgroup.openspg.cloudext + cloudext-impl-objectstorage-oss + + + com.antgroup.openspg.cloudext + cloudext-interface-computing-engine + + + com.antgroup.openspg.builder + builder-runner-local + diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/api/SchedulerService.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/api/SchedulerService.java index 5c0b5915b..3843cffb9 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/api/SchedulerService.java +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/api/SchedulerService.java @@ -12,10 +12,13 @@ */ package com.antgroup.openspg.server.core.scheduler.service.api; +import com.antgroup.openspg.server.api.facade.Paged; +import 
com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerTaskQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; -import java.util.List; /** Scheduler Service:submit,execute,delete and other scheduler interfaces */ public interface SchedulerService { @@ -35,13 +38,13 @@ public interface SchedulerService { Boolean deleteJob(Long jobId); /** update Job fields */ - boolean updateJob(SchedulerJob job); + Boolean updateJob(SchedulerJob job); /** get Job details By id */ SchedulerJob getJobById(Long jobId); /** search Jobs by fields */ - List searchJobs(SchedulerJob query); + Paged searchJobs(SchedulerJobQuery query); /** get Instance details By id */ SchedulerInstance getInstanceById(Long instanceId); @@ -59,8 +62,8 @@ public interface SchedulerService { Boolean triggerInstance(Long instanceId); /** search Instances by fields */ - List searchInstances(SchedulerInstance query); + Paged searchInstances(SchedulerInstanceQuery query); /** search Tasks by fields */ - List searchTasks(SchedulerTask query); + Paged searchTasks(SchedulerTaskQuery query); } diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/api/impl/SchedulerServiceImpl.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/api/impl/SchedulerServiceImpl.java index 684331958..4825e6daf 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/api/impl/SchedulerServiceImpl.java +++ 
b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/api/impl/SchedulerServiceImpl.java @@ -12,11 +12,15 @@ */ package com.antgroup.openspg.server.core.scheduler.service.api.impl; +import com.antgroup.openspg.server.api.facade.Paged; import com.antgroup.openspg.server.common.model.exception.SchedulerException; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.InstanceStatus; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.LifeCycle; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.Status; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TaskStatus; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerTaskQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; @@ -48,6 +52,7 @@ public class SchedulerServiceImpl implements SchedulerService { new ThreadPoolExecutor(1, 20, 30, TimeUnit.MINUTES, new LinkedBlockingQueue<>(100)); @Autowired SchedulerJobService schedulerJobService; + @Autowired SchedulerInstanceService schedulerInstanceService; @Autowired SchedulerTaskService schedulerTaskService; @Autowired SchedulerCommonService schedulerCommonService; @@ -122,7 +127,7 @@ public Boolean executeJob(Long jobId) { /** stop all not finish instance by job id */ private void stopJobAllInstance(Long jobId) { - SchedulerInstance query = new SchedulerInstance(); + SchedulerInstanceQuery query = new SchedulerInstanceQuery(); query.setJobId(jobId); List instances = schedulerInstanceService.getNotFinishInstance(query); if (CollectionUtils.isEmpty(instances)) { @@ -176,7 +181,7 
@@ public Boolean deleteJob(Long jobId) { } @Override - public boolean updateJob(SchedulerJob job) { + public Boolean updateJob(SchedulerJob job) { Long id = schedulerJobService.update(job); return id > 0; } @@ -187,7 +192,7 @@ public SchedulerJob getJobById(Long jobId) { } @Override - public List searchJobs(SchedulerJob query) { + public Paged searchJobs(SchedulerJobQuery query) { return schedulerJobService.query(query); } @@ -234,12 +239,12 @@ public Boolean triggerInstance(Long instanceId) { } @Override - public List searchInstances(SchedulerInstance query) { + public Paged searchInstances(SchedulerInstanceQuery query) { return schedulerInstanceService.query(query); } @Override - public List searchTasks(SchedulerTask query) { + public Paged searchTasks(SchedulerTaskQuery query) { return schedulerTaskService.query(query); } } diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/common/MemoryTaskServer.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/common/MemoryTaskServer.java new file mode 100644 index 000000000..23c27e1b6 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/common/MemoryTaskServer.java @@ -0,0 +1,143 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.service.common; + +import com.antgroup.openspg.common.util.DateTimeUtils; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; +import java.util.Date; +import java.util.concurrent.Callable; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.springframework.stereotype.Service; + +@Service +@Slf4j +public class MemoryTaskServer { + + private final ConcurrentMap<String, SchedulerTask> taskMap = new ConcurrentHashMap<>(); + private final ConcurrentMap<String, Future<String>> futureMap = new ConcurrentHashMap<>(); + + private final ExecutorService executorService = + new ThreadPoolExecutor( + 3, + 10, + 60 * 60, + TimeUnit.SECONDS, + new LinkedBlockingQueue<>(10), + new ThreadPoolExecutor.CallerRunsPolicy()); + + public String submit(MemoryTaskCallable<String> taskCallable, String taskId) { + SchedulerTask taskInfo = new SchedulerTask(); + taskInfo.setNodeId(taskId); + taskInfo.setStatus(SchedulerEnum.TaskStatus.WAIT); + taskMap.put(taskId, taskInfo); + taskCallable.setTask(taskInfo); + + Future<String> future = + CompletableFuture.supplyAsync(() -> executeTask(taskId, taskCallable), executorService); + futureMap.put(taskId, future); + + return taskId; + } + + private String executeTask(String taskId, MemoryTaskCallable<String> taskCallable) { + SchedulerTask taskInfo = taskMap.get(taskId); + taskInfo.setStatus(SchedulerEnum.TaskStatus.RUNNING); + taskInfo.setBeginTime(new Date()); + try { + String result = taskCallable.call(); 
taskInfo.setStatus(SchedulerEnum.TaskStatus.FINISH); + taskInfo.setOutput(result); + } catch (Exception e) { + taskInfo.setStatus(SchedulerEnum.TaskStatus.ERROR); + taskInfo.setTraceLog(ExceptionUtils.getStackTrace(e)); + log.error("executeTask Exception", e); + } finally { + taskInfo.setFinishTime(new Date()); + } + return taskId; + } + + public SchedulerTask getTask(String taskId) { + return taskMap.get(taskId); + } + + public boolean stopTask(String taskId) { + try { + Future future = futureMap.get(taskId); + if (future != null && !future.isDone()) { + boolean cancelled = future.cancel(true); + if (cancelled) { + SchedulerTask taskInfo = taskMap.get(taskId); + taskInfo.setStatus(SchedulerEnum.TaskStatus.TERMINATE); + taskMap.put(taskId, taskInfo); + futureMap.remove(taskId); + return true; + } + } + return false; + } catch (Exception e) { + log.error("stopTask Exception", e); + return false; + } finally { + taskMap.remove(taskId); + futureMap.remove(taskId); + } + } + + public ConcurrentMap getAllTasks() { + return taskMap; + } + + public abstract static class MemoryTaskCallable implements Callable { + + private SchedulerTask task; + + private StringBuffer traceLog = new StringBuffer(); + + public SchedulerTask getTask() { + return task; + } + + public void setTask(SchedulerTask task) { + this.task = task; + } + + public StringBuffer getTraceLog() { + return traceLog; + } + + public void addTraceLog(String message, Object... 
args) { + int dstOffset = 0; + StringBuffer log = new StringBuffer(" >> "); + log.append(DateTimeUtils.getDate2Str("HH:mm:ss", new Date())) + .append("(Task): ") + .append(String.format(message, args)) + .append(System.getProperty("line.separator")); + traceLog.insert(dstOffset, log); + task.setTraceLog(traceLog.toString()); + } + + @Override + public abstract T call() throws Exception; + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/common/SchedulerCommonService.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/common/SchedulerCommonService.java index 51f7f5db8..9c0b74167 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/common/SchedulerCommonService.java +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/common/SchedulerCommonService.java @@ -16,6 +16,7 @@ import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.InstanceStatus; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TaskStatus; import com.antgroup.openspg.server.common.service.spring.SpringContextHolder; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; @@ -32,6 +33,8 @@ import com.google.common.collect.Lists; import java.util.Date; import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -49,6 +52,7 @@ public class SchedulerCommonService { public static final Long FINISH = 100L; @Autowired SchedulerJobService 
schedulerJobService; + @Autowired SchedulerInstanceService schedulerInstanceService; @Autowired SchedulerTaskService schedulerTaskService; @Autowired SchedulerConfig schedulerConfig; @@ -67,6 +71,32 @@ public void setInstanceFinish( stopRunningTasks(instance); schedulerTaskService.setStatusByInstanceId(instance.getId(), taskStatus); + SchedulerJob job = schedulerJobService.getById(instance.getJobId()); + TranslatorFactory.getTranslator(job.getTranslateType()) + .statusCallback(job, instance, instanceStatus); + } + + /** Rerun all tasks after the specified task */ + public void RerunFromTask(SchedulerInstance instance, String taskType) { + TaskExecuteDag taskDag = instance.getTaskDag(); + List<TaskExecuteDag.Node> nodes = taskDag.getNodesByType(taskType); + List<SchedulerTask> tasks = schedulerTaskService.queryByInstanceId(instance.getId()); + Map<String, SchedulerTask> taskMap = + tasks.stream() + .collect(Collectors.toMap(SchedulerTask::getNodeId, task -> task)); + for (TaskExecuteDag.Node node : nodes) { + SchedulerTask task = taskMap.get(node.getId()); + SchedulerTask runningTask = new SchedulerTask(instance, TaskStatus.RUNNING, node); + runningTask.setId(task.getId()); + schedulerTaskService.update(runningTask); + List<TaskExecuteDag.Node> subsequentNodes = taskDag.getSuccessorNodes(node.getId()); + for (TaskExecuteDag.Node subsequentNode : subsequentNodes) { + SchedulerTask subsequentTask = taskMap.get(subsequentNode.getId()); + SchedulerTask waitTask = new SchedulerTask(instance, TaskStatus.WAIT, subsequentNode); + waitTask.setId(subsequentTask.getId()); + schedulerTaskService.update(waitTask); + } + } + } /** stop all running tasks by instance */ @@ -76,34 +106,37 @@ private void stopRunningTasks(SchedulerInstance instance) { SchedulerJob job = schedulerJobService.getById(instance.getJobId()); for (SchedulerTask task : taskList) { - // Filter non-running tasks - if (!TaskStatus.isRunning(task.getStatus()) || StringUtils.isBlank(task.getType())) { - continue; - } - - // get AsyncTaskExecute by type - String type = 
task.getType().split(UNDERLINE_SEPARATOR)[0]; - TaskExecute jobTask = SpringContextHolder.getBean(type, TaskExecute.class); - boolean isAsyncTask = (jobTask != null && jobTask instanceof AsyncTaskExecute); - if (!isAsyncTask) { - log.warn("get bean is null or not instance of JobAsyncTask id: {}", task.getId()); - continue; - } + stopRunningTask(job, instance, task); + } + } - // transform to jobAsyncTask trigger stop - AsyncTaskExecute jobAsyncTask = (AsyncTaskExecute) jobTask; - TaskExecuteContext context = new TaskExecuteContext(job, instance, task); - jobAsyncTask.stop(context, task.getResource()); + /** stop running task */ + private void stopRunningTask(SchedulerJob job, SchedulerInstance instance, SchedulerTask task) { + // Filter non-running tasks + if (!TaskStatus.isRunning(task.getStatus()) || StringUtils.isBlank(task.getType())) { + return; + } + // get AsyncTaskExecute by type + String type = task.getType().split(UNDERLINE_SEPARATOR)[0]; + TaskExecute jobTask = SpringContextHolder.getBean(type, TaskExecute.class); + boolean isAsyncTask = (jobTask != null && jobTask instanceof AsyncTaskExecute); + if (!isAsyncTask) { + log.warn("get bean is null or not instance of JobAsyncTask id: {}", task.getId()); + return; } + // transform to jobAsyncTask trigger stop + AsyncTaskExecute jobAsyncTask = (AsyncTaskExecute) jobTask; + TaskExecuteContext context = new TaskExecuteContext(job, instance, task); + jobAsyncTask.stop(context, task.getResource()); } /** check Instance is Running within 24H */ private void checkInstanceRunning(SchedulerJob job) { - SchedulerInstance query = new SchedulerInstance(); + SchedulerInstanceQuery query = new SchedulerInstanceQuery(); query.setJobId(job.getId()); query.setStartCreateTime(DateUtils.addDays(new Date(), -1)); - List instances = schedulerInstanceService.query(query); + List instances = schedulerInstanceService.query(query).getResults(); for (SchedulerInstance instance : instances) { if 
(!InstanceStatus.isFinished(instance.getStatus())) { throw new SchedulerException("Running {} exist within 24H", instance.getUniqueId()); diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/config/SchedulerConfig.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/config/SchedulerConfig.java index 123eeb491..1c1bb12a0 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/config/SchedulerConfig.java +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/config/SchedulerConfig.java @@ -18,7 +18,6 @@ package com.antgroup.openspg.server.core.scheduler.service.config; import com.antgroup.openspg.common.util.StringUtils; -import java.util.concurrent.TimeUnit; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; @@ -26,47 +25,17 @@ @Component public class SchedulerConfig { - @Value("${scheduler.execute.instances.period:}") - private String executeInstancesPeriod; - - @Value("${scheduler.execute.instances.unit:}") - private String executeInstancesUnit; - - @Value("${scheduler.generate.instances.period:}") - private String generateInstancesPeriod; - - @Value("${scheduler.generate.instances.unit:}") - private String generateInstancesUnit; + @Value("${scheduler.handler.process.period:}") + private String handlerProcessPeriod; @Value("${scheduler.execute.max.day:}") private String executeMaxDay; - public Long getExecuteInstancesPeriod() { - if (StringUtils.isBlank(executeInstancesPeriod)) { - return null; - } - return Long.valueOf(executeInstancesPeriod); - } - - public TimeUnit getExecuteInstancesUnit() { - if (StringUtils.isBlank(executeInstancesPeriod)) { - return null; - } - return TimeUnit.valueOf(executeInstancesUnit); - } - - public Long getGenerateInstancesPeriod() { - if 
(StringUtils.isBlank(generateInstancesPeriod)) { - return null; - } - return Long.valueOf(generateInstancesPeriod); - } - - public TimeUnit getGenerateInstancesUnit() { - if (StringUtils.isBlank(generateInstancesUnit)) { + public Long getHandlerProcessPeriod() { + if (StringUtils.isBlank(handlerProcessPeriod)) { return null; } - return TimeUnit.valueOf(generateInstancesUnit); + return Long.valueOf(handlerProcessPeriod); } public Integer getExecuteMaxDay() { diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/engine/impl/SchedulerExecuteServiceImpl.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/engine/impl/SchedulerExecuteServiceImpl.java index 81d4f7b58..7f5982a4f 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/engine/impl/SchedulerExecuteServiceImpl.java +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/engine/impl/SchedulerExecuteServiceImpl.java @@ -16,6 +16,8 @@ import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.InstanceStatus; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TaskStatus; import com.antgroup.openspg.server.common.service.spring.SpringContextHolder; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; @@ -59,7 +61,9 @@ public class SchedulerExecuteServiceImpl implements SchedulerExecuteService { private ScheduledExecutorService executorService = new ScheduledThreadPoolExecutor(10); @Autowired SchedulerConfig schedulerConfig; + 
@Autowired SchedulerJobService schedulerJobService; + @Autowired SchedulerInstanceService schedulerInstanceService; @Autowired SchedulerTaskService schedulerTaskService; @Autowired SchedulerCommonService schedulerCommonService; @@ -67,10 +71,10 @@ public class SchedulerExecuteServiceImpl implements SchedulerExecuteService { /** generate instances by period job */ @Override public void generateInstances() { - SchedulerJob record = new SchedulerJob(); + SchedulerJobQuery record = new SchedulerJobQuery(); record.setLifeCycle(SchedulerEnum.LifeCycle.PERIOD); record.setStatus(SchedulerEnum.Status.ENABLE); - List allJob = schedulerJobService.query(record); + List allJob = schedulerJobService.query(record).getResults(); log.info("getAllPeriodJob successful size:{}", allJob.size()); if (CollectionUtils.isEmpty(allJob)) { @@ -179,8 +183,7 @@ private void executeNextTask(TaskExecuteContext context) { // execute all next task for (TaskExecuteDag.Node nextNode : nextNodes) { taskList.add( - schedulerTaskService.queryByInstanceIdAndType( - instance.getId(), nextNode.getTaskComponent())); + schedulerTaskService.queryByInstanceIdAndNodeId(instance.getId(), nextNode.getId())); } SchedulerInstance ins = schedulerInstanceService.getById(instance.getId()); Runnable instanceRunnable = () -> executeInstance(ins, taskList); @@ -217,8 +220,7 @@ private List checkAndUpdateWaitStatus( /** check all nodes is finished */ private boolean checkAllNodesFinished(Long instanceId, List nodes) { for (TaskExecuteDag.Node node : nodes) { - SchedulerTask t = - schedulerTaskService.queryByInstanceIdAndType(instanceId, node.getTaskComponent()); + SchedulerTask t = schedulerTaskService.queryByInstanceIdAndNodeId(instanceId, node.getId()); if (!TaskStatus.isFinished(t.getStatus())) { return false; } @@ -228,7 +230,7 @@ private boolean checkAllNodesFinished(Long instanceId, List /** get all not finish instances */ private List getAllNotFinishInstances() { - SchedulerInstance record = new 
SchedulerInstance(); + SchedulerInstanceQuery record = new SchedulerInstanceQuery(); Integer maxDays = schedulerConfig.getExecuteMaxDay() + 1; Date startDate = DateUtils.addDays(new Date(), -maxDays); record.setStartCreateTime(startDate); diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/SchedulerHandler.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/SchedulerHandler.java index 6e2156236..9ca2062fc 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/SchedulerHandler.java +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/SchedulerHandler.java @@ -12,12 +12,14 @@ */ package com.antgroup.openspg.server.core.scheduler.service.handler; +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerHandlerResult; + /** Scheduler Handler. To generate and execute Instances */ public interface SchedulerHandler { - /** scheduler timer entrance. 
execute Instances */ - void executeInstances(); + SchedulerHandlerResult process(JSONObject params); - /** scheduler generate Instances timer */ - void generateInstances(); + /** @return Scheduling time interval Unit: seconds */ + Long getPeriod(); } diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/client/db/SchedulerHandlerClient.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/client/db/SchedulerHandlerClient.java new file mode 100644 index 000000000..4e7d46098 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/client/db/SchedulerHandlerClient.java @@ -0,0 +1,296 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.service.handler.client.db; + +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.common.util.DateTimeUtils; +import com.antgroup.openspg.common.util.NetworkAddressUtils; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.SchedulerInfoStatus; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TaskStatus; +import com.antgroup.openspg.server.common.service.spring.SpringContextHolder; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInfoQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerHandlerResult; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfo; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfoLog; +import com.antgroup.openspg.server.core.scheduler.service.config.SchedulerConfig; +import com.antgroup.openspg.server.core.scheduler.service.handler.SchedulerHandler; +import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerInfoService; +import com.google.common.collect.Lists; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.collections4.MapUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.apache.commons.lang3.time.DateUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.event.ApplicationReadyEvent; +import org.springframework.context.event.EventListener; +import org.springframework.stereotype.Service; + +/** Scheduler Handler DB implementation class. 
To generate and execute Instances */ +@Service +@Slf4j +@ConditionalOnProperty(name = "scheduler.handler.type", havingValue = "db") +public class SchedulerHandlerClient { + + private static final int corePoolSize = 1; + private static final long initialDelay = 0; + public static final Integer LOCK_TIME_MINUTES = 10; + + private static final int LOGGER_MAX_COUNT = 3; + + @Autowired SchedulerConfig schedulerConfig; + @Autowired SchedulerInfoService schedulerInfoService; + + @EventListener(ApplicationReadyEvent.class) + public void init() { + insertSchedulerToDB(); + log.info("Init DB Scheduler Handler"); + List schedulerInfos = + schedulerInfoService.query(new SchedulerInfoQuery()).getResults(); + log.info("Number of DB schedulerHandler:{}", schedulerInfos.size()); + if (CollectionUtils.isEmpty(schedulerInfos)) { + log.info("The schedulerHandler has not in DB, no registration is required!"); + return; + } + for (SchedulerInfo info : schedulerInfos) { + try { + SchedulerHandler handler = + SpringContextHolder.getBean(info.getName(), SchedulerHandler.class); + if (handler == null) { + log.error("schedulerHandler bean not found name:{}", info.getName()); + continue; + } + ScheduledThreadPoolExecutor executor = + new ScheduledThreadPoolExecutor( + corePoolSize, + (runnable) -> { + Thread thread = new Thread(runnable); + thread.setDaemon(true); + thread.setName("dbSchedule-" + thread.getId()); + return thread; + }); + executor.scheduleAtFixedRate( + new SchedulerHandlerRunnable(handler, info.getId(), info.getName()), + initialDelay, + info.getPeriod(), + TimeUnit.SECONDS); + } catch (Exception e) { + log.error("Executor DB SchedulerHandler Exception:" + info.getName(), e); + } + } + } + + public void insertSchedulerToDB() { + log.info("get Scheduler Handler Beans"); + Map schedulerHandlers = + SpringContextHolder.getBeanMap(SchedulerHandler.class); + log.info("Number of registered schedulerHandler:{}", schedulerHandlers.size()); + if (MapUtils.isEmpty(schedulerHandlers)) 
{ + return; + } + for (String name : schedulerHandlers.keySet()) { + try { + SchedulerHandler schedulerHandler = schedulerHandlers.get(name); + Long period = + (schedulerHandler.getPeriod() == null || schedulerHandler.getPeriod() <= 0) + ? schedulerConfig.getHandlerProcessPeriod() + : schedulerHandler.getPeriod(); + SchedulerInfo schedulerInfo = schedulerInfoService.getByName(name); + if (schedulerInfo != null) { + continue; + } + schedulerInfo = new SchedulerInfo(); + schedulerInfo.setGmtCreate(new Date()); + schedulerInfo.setGmtModified(new Date()); + schedulerInfo.setName(name); + schedulerInfo.setStatus(SchedulerEnum.SchedulerInfoStatus.RUNNING); + schedulerInfo.setPeriod(period); + schedulerInfo.setCount(0); + JSONObject config = new JSONObject(); + config.put(SchedulerInfo.HOST_EXCEPTION_TIMEOUT_KEY, 300); + schedulerInfo.setConfig(config); + schedulerInfoService.insert(schedulerInfo); + } catch (Exception e) { + log.error("insert Scheduler Handler Exception:" + name, e); + } + } + } + + class SchedulerHandlerRunnable implements Runnable { + SchedulerHandler handler; + String handlerName; + Long handlerId; + + public SchedulerHandlerRunnable(SchedulerHandler handler, Long handlerId, String handlerName) { + this.handler = handler; + this.handlerId = handlerId; + this.handlerName = handlerName; + } + + @Override + public void run() { + try { + log.info("Start DB SchedulerHandler:{}", handlerName); + triggerJob(); + } catch (Exception e) { + log.error("DB SchedulerHandler process Exception", e); + } + } + + private Boolean triggerJob() { + Date rt = new Date(); + Boolean lockStatus = true; + try { + SchedulerInfo schedulerInfo = schedulerInfoService.getById(handlerId); + String hostAddress = NetworkAddressUtils.LOCAL_IP; + List whiteIps = schedulerInfo.getWhiteIps(); + if (CollectionUtils.isNotEmpty(whiteIps) && !whiteIps.contains(hostAddress)) { + log.info("The native ip is not in the whitelist, stop triggering"); + return false; + } + + lockStatus = 
lockTask(handlerId); + if (!lockStatus) { + log.info("get scheduler lock failed {} {}", handlerName, hostAddress); + return false; + } + log.info("get scheduler lock success {} {}", handlerName, hostAddress); + boolean result = canExecuteJob(handlerName, schedulerInfo); + log.info("can execute result {} {}", result, handlerName); + if (!result) { + return false; + } + SchedulerHandlerResult process = handler.process(new JSONObject()); + SchedulerInfoLog infoLog = + new SchedulerInfoLog(process.getStatus().name(), process.getMsg(), rt, new Date()); + updateSchedulerInfoLog(handlerId, process.getStatus(), infoLog); + return true; + } catch (Exception e) { + log.error(handlerName + " process failed", e); + SchedulerInfoLog infoLog = + new SchedulerInfoLog( + TaskStatus.ERROR.name(), ExceptionUtils.getStackTrace(e), rt, new Date()); + boolean success = updateSchedulerInfoLog(handlerId, TaskStatus.ERROR, infoLog); + log.info("update error info {} {}", success, handlerName); + return false; + } finally { + unlockTask(handlerId, lockStatus); + } + } + + private boolean canExecuteJob(String name, SchedulerInfo schedulerInfo) { + List infoLogs = schedulerInfo.getLog(); + if (CollectionUtils.isEmpty(infoLogs)) { + return true; + } + Date nowDate = new Date(); + if (SchedulerInfoStatus.RUNNING.equals(schedulerInfo.getStatus())) { + Long hostExceptionTimeout = schedulerInfo.getHostExceptionTimeout(); + if (hostExceptionTimeout != null + && nowDate.getTime() - schedulerInfo.getGmtModified().getTime() + >= hostExceptionTimeout * 1000) { + log.info("running and timeout to pull again {} {}", name, hostExceptionTimeout); + return true; + } + } + SchedulerInfoLog schedulerLog = infoLogs.get(infoLogs.size() - 1); + if (SchedulerInfoStatus.WAIT.equals(schedulerInfo.getStatus()) + && nowDate.getTime() - schedulerLog.getRt().getTime() + >= schedulerInfo.getPeriod() * 1000) { + SchedulerInfo infoDTO = new SchedulerInfo(); + infoDTO.setId(schedulerInfo.getId()); + 
infoDTO.setStatus(SchedulerInfoStatus.RUNNING); + schedulerInfoService.update(infoDTO); + return true; + } + return false; + } + + public boolean updateSchedulerInfoLog(Long id, TaskStatus status, SchedulerInfoLog infoLog) { + SchedulerInfo schedulerInfo = schedulerInfoService.getById(id); + if (null == schedulerInfo) { + log.warn("no scheduler info {} ", id); + return false; + } + if (TaskStatus.FINISH.equals(status)) { + schedulerInfo.setCount(0); + } else { + schedulerInfo.setCount(schedulerInfo.getCount() + 1); + } + schedulerInfo.setStatus(SchedulerInfoStatus.WAIT); + schedulerInfo.setLog(buildNewLogList(schedulerInfo.getLog(), infoLog)); + Long count = schedulerInfoService.update(schedulerInfo); + return count > 0; + } + + private List buildNewLogList( + List logList, SchedulerInfoLog infoLog) { + if (null == logList) { + logList = Lists.newArrayList(); + } + if (logList.size() >= LOGGER_MAX_COUNT) { + logList.remove(0); + } + logList.add(infoLog); + return logList; + } + + private boolean lockTask(Long id) { + SchedulerInfo info = schedulerInfoService.getById(id); + if (info.getLockTime() == null) { + if (schedulerInfoService.updateLock(id) < 1) { + log.warn("Failed to preempt scheduler lock, the lock is already occupied!"); + return false; + } + return true; + } + + Date now = new Date(); + Date unLockTime = DateUtils.addMinutes(info.getLockTime(), LOCK_TIME_MINUTES); + if (now.before(unLockTime)) { + log.info( + "Last scheduler lock preempt time:{},The threshold was not exceeded. Wait for the execution to complete", + DateTimeUtils.getDate2LongStr(info.getLockTime())); + return false; + } + + // Timeout release lock + log.info( + "Last scheduler lock preempt time:{}, The threshold was exceeded. 
The current process is executed directly", + DateTimeUtils.getDate2LongStr(info.getLockTime())); + unlockTask(id, true); + if (schedulerInfoService.updateLock(id) < 1) { + log.warn("Failed to re-preempt scheduler lock!"); + return false; + } + log.info("Re-preempt scheduler lock successfully!"); + return true; + } + + /** Release lock after scheduling is completed */ + private void unlockTask(Long id, boolean lock) { + if (!lock) { + return; + } + schedulerInfoService.updateUnlock(id); + log.info("Scheduler lock released successfully!"); + } + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/client/local/LocalSchedulerHandlerClient.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/client/local/LocalSchedulerHandlerClient.java new file mode 100644 index 000000000..fb37d9ad8 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/client/local/LocalSchedulerHandlerClient.java @@ -0,0 +1,90 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.service.handler.client.local; + +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.server.common.service.spring.SpringContextHolder; +import com.antgroup.openspg.server.core.scheduler.service.config.SchedulerConfig; +import com.antgroup.openspg.server.core.scheduler.service.handler.SchedulerHandler; +import java.util.List; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.collections4.CollectionUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.event.ApplicationReadyEvent; +import org.springframework.context.event.EventListener; +import org.springframework.stereotype.Service; + +/** Scheduler Handler Local implementation class. To generate and execute Instances */ +@Service +@Slf4j +@ConditionalOnProperty(name = "scheduler.handler.type", havingValue = "local") +public class LocalSchedulerHandlerClient { + + private static final int corePoolSize = 1; + private static final long initialDelay = 0; + + @Autowired SchedulerConfig schedulerConfig; + + @EventListener(ApplicationReadyEvent.class) + public void init() { + log.info("Init Local Scheduler Handler"); + List schedulerHandlers = SpringContextHolder.getBeans(SchedulerHandler.class); + log.info("Number of registered schedulerHandler:{}", schedulerHandlers.size()); + if (CollectionUtils.isEmpty(schedulerHandlers)) { + log.info("The schedulerHandler has not been scanned, no registration is required!"); + return; + } + for (SchedulerHandler handler : schedulerHandlers) { + try { + Long period = + (handler.getPeriod() == null || handler.getPeriod() <= 0) + ? 
schedulerConfig.getHandlerProcessPeriod() + : handler.getPeriod(); + ScheduledThreadPoolExecutor executor = + new ScheduledThreadPoolExecutor( + corePoolSize, + (runnable) -> { + Thread thread = new Thread(runnable); + thread.setDaemon(true); + thread.setName("localSchedule-" + thread.getId()); + return thread; + }); + executor.scheduleAtFixedRate( + new SchedulerHandlerRunnable(handler), initialDelay, period, TimeUnit.SECONDS); + } catch (Exception e) { + log.error("Executor SchedulerHandler Exception:" + handler.getClass().getSimpleName(), e); + } + } + } + + class SchedulerHandlerRunnable implements Runnable { + SchedulerHandler handler; + + public SchedulerHandlerRunnable(SchedulerHandler handler) { + this.handler = handler; + } + + @Override + public void run() { + try { + log.info("start SchedulerHandler:{}", handler.getClass().getSimpleName()); + handler.process(new JSONObject()); + } catch (Exception e) { + log.error("SchedulerHandler process Exception", e); + } + } + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/ExecuteInstanceScheduleHandler.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/ExecuteInstanceScheduleHandler.java new file mode 100644 index 000000000..08a59eedb --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/ExecuteInstanceScheduleHandler.java @@ -0,0 +1,49 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.service.handler.impl; + +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TaskStatus; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerHandlerResult; +import com.antgroup.openspg.server.core.scheduler.service.engine.SchedulerExecuteService; +import com.antgroup.openspg.server.core.scheduler.service.handler.SchedulerHandler; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Slf4j +@Component("executeInstanceScheduleHandler") +public class ExecuteInstanceScheduleHandler implements SchedulerHandler { + + @Autowired SchedulerExecuteService schedulerExecuteService; + + @Override + public SchedulerHandlerResult process(JSONObject jobContext) { + long startTime = System.currentTimeMillis(); + try { + schedulerExecuteService.executeInstances(); + Long time = System.currentTimeMillis() - startTime; + log.info("run {} end time:{}", this.getClass().getSimpleName(), time); + return new SchedulerHandlerResult(TaskStatus.FINISH, TaskStatus.FINISH.name()); + } catch (Exception e) { + log.error("run {} Exception", this.getClass().getSimpleName(), e); + return new SchedulerHandlerResult(TaskStatus.ERROR, ExceptionUtils.getStackTrace(e)); + } + } + + @Override + public Long getPeriod() { + return 60L; + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/GenerateInstanceScheduleHandler.java 
b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/GenerateInstanceScheduleHandler.java new file mode 100644 index 000000000..241d63571 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/GenerateInstanceScheduleHandler.java @@ -0,0 +1,51 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.service.handler.impl; + +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerHandlerResult; +import com.antgroup.openspg.server.core.scheduler.service.engine.SchedulerExecuteService; +import com.antgroup.openspg.server.core.scheduler.service.handler.SchedulerHandler; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.exception.ExceptionUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Slf4j +@Component("generateInstanceScheduleHandler") +public class GenerateInstanceScheduleHandler implements SchedulerHandler { + + @Autowired SchedulerExecuteService schedulerExecuteService; + + @Override + public SchedulerHandlerResult process(JSONObject jobContext) { + long startTime = System.currentTimeMillis(); + try { + schedulerExecuteService.generateInstances(); + Long time = System.currentTimeMillis() - startTime; + log.info("run {} end 
time:{}", this.getClass().getSimpleName(), time); + return new SchedulerHandlerResult( + SchedulerEnum.TaskStatus.FINISH, SchedulerEnum.TaskStatus.FINISH.name()); + } catch (Exception e) { + log.error("run {} Exception", this.getClass().getSimpleName(), e); + return new SchedulerHandlerResult( + SchedulerEnum.TaskStatus.ERROR, ExceptionUtils.getStackTrace(e)); + } + } + + @Override + public Long getPeriod() { + return 60L; + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/local/LocalSchedulerHandler.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/local/LocalSchedulerHandler.java deleted file mode 100644 index 317657b04..000000000 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/handler/impl/local/LocalSchedulerHandler.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2023 OpenSPG Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. 
- */ -package com.antgroup.openspg.server.core.scheduler.service.handler.impl.local; - -import com.antgroup.openspg.server.core.scheduler.service.config.SchedulerConfig; -import com.antgroup.openspg.server.core.scheduler.service.engine.SchedulerExecuteService; -import com.antgroup.openspg.server.core.scheduler.service.handler.SchedulerHandler; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledThreadPoolExecutor; -import javax.annotation.PostConstruct; -import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.stereotype.Service; - -/** Scheduler Handler Local implementation class. To generate and execute Instances */ -@Service -@Slf4j -@ConditionalOnProperty(name = "scheduler.handler.type", havingValue = "local") -public class LocalSchedulerHandler implements SchedulerHandler { - - private static final int corePoolSize = 1; - private static final long initialDelay = 0; - - private static ScheduledExecutorService EXECUTE = new ScheduledThreadPoolExecutor(corePoolSize); - private static ScheduledExecutorService GENERATE = new ScheduledThreadPoolExecutor(corePoolSize); - - @Autowired SchedulerConfig schedulerConfig; - @Autowired SchedulerExecuteService schedulerExecuteService; - - @Override - @PostConstruct - public void executeInstances() { - log.info("start executeInstances"); - EXECUTE.scheduleAtFixedRate( - new ExecuteRunnable(), - initialDelay, - schedulerConfig.getExecuteInstancesPeriod(), - schedulerConfig.getExecuteInstancesUnit()); - } - - @Override - @PostConstruct - public void generateInstances() { - log.info("start generateInstances"); - GENERATE.scheduleAtFixedRate( - new GenerateRunnable(), - initialDelay, - schedulerConfig.getGenerateInstancesPeriod(), - schedulerConfig.getGenerateInstancesUnit()); - } - - /** Execute Instances Runnable */ - class ExecuteRunnable 
implements Runnable { - @Override - public void run() { - try { - Long startTime = System.currentTimeMillis(); - schedulerExecuteService.executeInstances(); - Long time = System.currentTimeMillis() - startTime; - log.info("run ExecuteInstances end time:{}", time); - } catch (Exception e) { - log.error("run ExecuteInstances Exception", e); - } - } - } - - /** Generate Instances Runnable */ - class GenerateRunnable implements Runnable { - @Override - public void run() { - try { - Long startTime = System.currentTimeMillis(); - schedulerExecuteService.generateInstances(); - Long time = System.currentTimeMillis() - startTime; - log.info("run GenerateInstances end time:{}", time); - } catch (Exception e) { - log.error("run GenerateInstances Exception", e); - } - } - } -} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerInfoService.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerInfoService.java new file mode 100644 index 000000000..04a903a9e --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerInfoService.java @@ -0,0 +1,45 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.service.metadata; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInfoQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfo; + +/** Scheduler info Service: Add, delete, update, and query tasks */ +public interface SchedulerInfoService { + + /** insert info */ + Long insert(SchedulerInfo record); + + /** update By Id */ + Long update(SchedulerInfo record); + + /** delete By Id */ + int deleteById(Long id); + + /** get By id */ + SchedulerInfo getById(Long id); + + /** get By name */ + SchedulerInfo getByName(String name); + + /** query By Condition */ + Paged query(SchedulerInfoQuery record); + + /** update Lock */ + int updateLock(Long id); + + /** update Unlock */ + int updateUnlock(Long id); +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerInstanceService.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerInstanceService.java index 9ccfe2f9d..06eaf74aa 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerInstanceService.java +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerInstanceService.java @@ -12,6 +12,8 @@ */ package com.antgroup.openspg.server.core.scheduler.service.metadata; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; import java.util.List; @@ -31,11 +33,11 @@ public interface SchedulerInstanceService { SchedulerInstance getById(Long id); /** get By instanceId */ - SchedulerInstance getByUniqueId(String instanceId); + 
SchedulerInstance getByUniqueId(String uniqueId); /** query By Condition */ - List query(SchedulerInstance record); + Paged query(SchedulerInstanceQuery record); /** get Not Finish Instance */ - List getNotFinishInstance(SchedulerInstance record); + List getNotFinishInstance(SchedulerInstanceQuery record); } diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerJobService.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerJobService.java index ea512362e..56d199dbc 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerJobService.java +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerJobService.java @@ -12,8 +12,9 @@ */ package com.antgroup.openspg.server.core.scheduler.service.metadata; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; -import java.util.List; /** Scheduler Job Service: Add, delete, update, and query Jobs */ public interface SchedulerJobService { @@ -31,5 +32,5 @@ public interface SchedulerJobService { SchedulerJob getById(Long id); /** query By Condition */ - List query(SchedulerJob record); + Paged query(SchedulerJobQuery record); } diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerTaskService.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerTaskService.java index 1c4b4e252..1fd50b533 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerTaskService.java +++ 
b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/SchedulerTaskService.java @@ -12,7 +12,9 @@ */ package com.antgroup.openspg.server.core.scheduler.service.metadata; +import com.antgroup.openspg.server.api.facade.Paged; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TaskStatus; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerTaskQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; import java.util.List; @@ -35,10 +37,10 @@ public interface SchedulerTaskService { SchedulerTask getById(Long id); /** query By Condition */ - List query(SchedulerTask record); + Paged query(SchedulerTaskQuery record); - /** query By InstanceId And Type */ - SchedulerTask queryByInstanceIdAndType(Long instanceId, String type); + /** query By InstanceId And NodeId */ + SchedulerTask queryByInstanceIdAndNodeId(Long instanceId, String nodeId); /** query By InstanceId */ List queryByInstanceId(Long instanceId); diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerInfoServiceImpl.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerInfoServiceImpl.java new file mode 100644 index 000000000..df55a4b28 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerInfoServiceImpl.java @@ -0,0 +1,70 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.service.metadata.impl.db; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInfoQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfo; +import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerInfoService; +import com.antgroup.openspg.server.core.scheduler.service.repository.SchedulerInfoRepository; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.stereotype.Service; + +/** Scheduler Info Service implementation class: Add, delete, update, and query infos */ +@Service +@ConditionalOnProperty(name = "scheduler.metadata.store.type", havingValue = "db") +public class SchedulerInfoServiceImpl implements SchedulerInfoService { + + @Autowired private SchedulerInfoRepository schedulerInfoRepository; + + @Override + public Long insert(SchedulerInfo record) { + return schedulerInfoRepository.insert(record); + } + + @Override + public synchronized Long update(SchedulerInfo record) { + return schedulerInfoRepository.update(record); + } + + @Override + public synchronized int deleteById(Long id) { + return schedulerInfoRepository.deleteById(id); + } + + @Override + public SchedulerInfo getById(Long id) { + return schedulerInfoRepository.getById(id); + } + + @Override + public SchedulerInfo getByName(String name) { + return schedulerInfoRepository.getByName(name); + } + + @Override + public Paged query(SchedulerInfoQuery record) { + return 
schedulerInfoRepository.query(record); + } + + @Override + public int updateLock(Long id) { + return schedulerInfoRepository.updateLock(id); + } + + @Override + public int updateUnlock(Long id) { + return schedulerInfoRepository.updateUnlock(id); + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerInstanceServiceImpl.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerInstanceServiceImpl.java new file mode 100644 index 000000000..3c4aae897 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerInstanceServiceImpl.java @@ -0,0 +1,78 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.service.metadata.impl.db; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.exception.SchedulerException; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.InstanceStatus; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; +import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerInstanceService; +import com.antgroup.openspg.server.core.scheduler.service.repository.SchedulerInstanceRepository; +import java.util.List; +import java.util.stream.Collectors; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.stereotype.Service; + +/** Scheduler Instance Service implementation class: Add, delete, update, and query instances */ +@Service +@ConditionalOnProperty(name = "scheduler.metadata.store.type", havingValue = "db") +public class SchedulerInstanceServiceImpl implements SchedulerInstanceService { + + @Autowired private SchedulerInstanceRepository schedulerInstanceRepository; + + @Override + public Long insert(SchedulerInstance record) { + String uniqueId = record.getUniqueId(); + if (schedulerInstanceRepository.getByUniqueId(uniqueId) != null) { + throw new SchedulerException("uniqueId {} already existed", uniqueId); + } + return schedulerInstanceRepository.insert(record); + } + + @Override + public int deleteByJobId(Long jobId) { + return schedulerInstanceRepository.deleteByJobId(jobId); + } + + @Override + public Long update(SchedulerInstance record) { + return schedulerInstanceRepository.update(record); + } + + @Override + public SchedulerInstance getById(Long id) { + return schedulerInstanceRepository.getById(id); + } + + @Override + public SchedulerInstance 
getByUniqueId(String uniqueId) { + return schedulerInstanceRepository.getByUniqueId(uniqueId); + } + + @Override + public Paged query(SchedulerInstanceQuery record) { + return schedulerInstanceRepository.query(record); + } + + @Override + public List getNotFinishInstance(SchedulerInstanceQuery record) { + List instanceList = query(record).getResults(); + instanceList = + instanceList.stream() + .filter(s -> !InstanceStatus.isFinished(s.getStatus())) + .collect(Collectors.toList()); + return instanceList; + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerJobServiceImpl.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerJobServiceImpl.java new file mode 100644 index 000000000..62f0f5a10 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerJobServiceImpl.java @@ -0,0 +1,55 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.service.metadata.impl.db; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; +import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerJobService; +import com.antgroup.openspg.server.core.scheduler.service.repository.SchedulerJobRepository; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.stereotype.Service; + +/** Scheduler Job Service implementation class: Add, delete, update, and query Jobs */ +@Service +@ConditionalOnProperty(name = "scheduler.metadata.store.type", havingValue = "db") +public class SchedulerJobServiceImpl implements SchedulerJobService { + + @Autowired private SchedulerJobRepository schedulerJobRepository; + + @Override + public Long insert(SchedulerJob record) { + return schedulerJobRepository.insert(record); + } + + @Override + public int deleteById(Long id) { + return schedulerJobRepository.deleteById(id); + } + + @Override + public Long update(SchedulerJob record) { + return schedulerJobRepository.update(record); + } + + @Override + public SchedulerJob getById(Long id) { + return schedulerJobRepository.getById(id); + } + + @Override + public Paged query(SchedulerJobQuery record) { + return schedulerJobRepository.query(record); + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerTaskServiceImpl.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerTaskServiceImpl.java new file mode 100644 index 000000000..a36cca31f --- /dev/null +++ 
b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/db/SchedulerTaskServiceImpl.java @@ -0,0 +1,91 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.service.metadata.impl.db; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TaskStatus; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerTaskQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; +import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerTaskService; +import com.antgroup.openspg.server.core.scheduler.service.repository.SchedulerTaskRepository; +import java.util.List; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.stereotype.Service; + +/** Scheduler Task Service implementation class: Add, delete, update, and query tasks */ +@Service +@ConditionalOnProperty(name = "scheduler.metadata.store.type", havingValue = "db") +public class SchedulerTaskServiceImpl implements SchedulerTaskService { + + @Autowired private SchedulerTaskRepository schedulerTaskRepository; + + @Override + public Long insert(SchedulerTask record) { + return schedulerTaskRepository.insert(record); + } + + @Override + public synchronized int deleteByJobId(Long jobId) { + return 
schedulerTaskRepository.deleteByJobId(jobId); + } + + @Override + public synchronized Long update(SchedulerTask record) { + return schedulerTaskRepository.update(record); + } + + @Override + public synchronized Long replace(SchedulerTask record) { + if (record.getId() == null) { + return insert(record); + } else { + return update(record); + } + } + + @Override + public SchedulerTask getById(Long id) { + return schedulerTaskRepository.getById(id); + } + + @Override + public Paged query(SchedulerTaskQuery record) { + return schedulerTaskRepository.query(record); + } + + @Override + public SchedulerTask queryByInstanceIdAndNodeId(Long instanceId, String nodeId) { + return schedulerTaskRepository.queryByInstanceIdAndNodeId(instanceId, nodeId); + } + + @Override + public List queryByInstanceId(Long instanceId) { + return schedulerTaskRepository.queryByInstanceId(instanceId); + } + + @Override + public int setStatusByInstanceId(Long instanceId, TaskStatus status) { + return schedulerTaskRepository.setStatusByInstanceId(instanceId, status); + } + + @Override + public int updateLock(Long id) { + return schedulerTaskRepository.updateLock(id); + } + + @Override + public int updateUnlock(Long id) { + return schedulerTaskRepository.updateUnlock(id); + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerInfoServiceImpl.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerInfoServiceImpl.java new file mode 100644 index 000000000..663b60d37 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerInfoServiceImpl.java @@ -0,0 +1,133 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.service.metadata.impl.local; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.exception.SchedulerException; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInfoQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfo; +import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerInfoService; +import com.antgroup.openspg.server.core.scheduler.service.utils.SchedulerUtils; +import com.google.common.collect.Lists; +import java.util.Date; +import java.util.List; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.atomic.AtomicLong; +import org.springframework.beans.BeanUtils; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.stereotype.Service; + +/** Scheduler Info Service implementation class: Add, delete, update, and query infos */ +@Service +@ConditionalOnProperty(name = "scheduler.metadata.store.type", havingValue = "local") +public class LocalSchedulerInfoServiceImpl implements SchedulerInfoService { + + private static ConcurrentHashMap infos = new ConcurrentHashMap<>(); + private static AtomicLong maxId = new AtomicLong(0L); + + @Override + public synchronized Long insert(SchedulerInfo record) { + Long id = maxId.incrementAndGet(); + record.setId(id); + record.setGmtModified(new Date()); + infos.put(id, record); + return id; + } + + @Override + public synchronized int deleteById(Long id) { + SchedulerInfo record = infos.remove(id); + return record == null ? 
0 : 1; + } + + @Override + public synchronized Long update(SchedulerInfo record) { + Long id = record.getId(); + SchedulerInfo old = getById(id); + if (record.getGmtModified() != null && !old.getGmtModified().equals(record.getGmtModified())) { + return 0L; + } + record = SchedulerUtils.merge(old, record); + record.setGmtModified(new Date()); + infos.put(id, record); + return id; + } + + @Override + public SchedulerInfo getById(Long id) { + SchedulerInfo oldInfo = infos.get(id); + if (oldInfo == null) { + throw new SchedulerException("not find id {}", id); + } + SchedulerInfo info = new SchedulerInfo(); + BeanUtils.copyProperties(oldInfo, info); + return info; + } + + @Override + public Paged query(SchedulerInfoQuery record) { + List infoList = Lists.newArrayList(); + for (Long key : infos.keySet()) { + SchedulerInfo info = infos.get(key); + + // Filter info by fields + if (!SchedulerUtils.compare(info.getId(), record.getId(), SchedulerUtils.EQ) + || !SchedulerUtils.compare(info.getStatus(), record.getStatus(), SchedulerUtils.EQ) + || !SchedulerUtils.compare(info.getName(), record.getName(), SchedulerUtils.IN) + || !SchedulerUtils.compare(info.getConfig(), record.getConfig(), SchedulerUtils.IN)) { + continue; + } + + SchedulerInfo target = new SchedulerInfo(); + BeanUtils.copyProperties(info, target); + infoList.add(target); + } + Paged paged = new Paged<>(record.getPageSize(), record.getPageNo()); + paged.setResults(infoList); + return paged; + } + + @Override + public SchedulerInfo getByName(String name) { + for (Long key : infos.keySet()) { + SchedulerInfo info = infos.get(key); + if (name.equals(info.getName())) { + SchedulerInfo record = new SchedulerInfo(); + BeanUtils.copyProperties(info, record); + return record; + } + } + throw new SchedulerException("not find name {}", name); + } + + @Override + public int updateLock(Long id) { + SchedulerInfo oldRecord = getById(id); + if (oldRecord.getLockTime() != null) { + return 0; + } + oldRecord.setGmtModified(new 
Date()); + oldRecord.setLockTime(new Date()); + infos.put(oldRecord.getId(), oldRecord); + return 1; + } + + @Override + public int updateUnlock(Long id) { + SchedulerInfo oldRecord = getById(id); + oldRecord.setGmtModified(new Date()); + oldRecord.setLockTime(null); + infos.put(oldRecord.getId(), oldRecord); + return 1; + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerInstanceServiceImpl.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerInstanceServiceImpl.java index 5216afaae..e9a710ed6 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerInstanceServiceImpl.java +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerInstanceServiceImpl.java @@ -12,11 +12,12 @@ */ package com.antgroup.openspg.server.core.scheduler.service.metadata.impl.local; +import com.antgroup.openspg.server.api.facade.Paged; import com.antgroup.openspg.server.common.model.exception.SchedulerException; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.InstanceStatus; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerInstanceService; -import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerTaskService; import com.antgroup.openspg.server.core.scheduler.service.utils.SchedulerUtils; import com.google.common.collect.Lists; import java.util.Date; @@ -25,7 +26,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import org.springframework.beans.BeanUtils; -import 
org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Service; @@ -37,8 +37,6 @@ public class LocalSchedulerInstanceServiceImpl implements SchedulerInstanceServi private static ConcurrentHashMap instances = new ConcurrentHashMap<>(); private static AtomicLong maxId = new AtomicLong(0L); - @Autowired SchedulerTaskService schedulerTaskService; - @Override public synchronized Long insert(SchedulerInstance record) { String uniqueId = record.getUniqueId(); @@ -109,7 +107,7 @@ public SchedulerInstance getByUniqueId(String instanceId) { } @Override - public List query(SchedulerInstance record) { + public Paged query(SchedulerInstanceQuery record) { List instanceList = Lists.newArrayList(); for (Long key : instances.keySet()) { SchedulerInstance instance = instances.get(key); @@ -140,12 +138,14 @@ public List query(SchedulerInstance record) { BeanUtils.copyProperties(instance, target); instanceList.add(target); } - return instanceList; + Paged paged = new Paged<>(record.getPageSize(), record.getPageNo()); + paged.setResults(instanceList); + return paged; } @Override - public List getNotFinishInstance(SchedulerInstance record) { - List instanceList = query(record); + public List getNotFinishInstance(SchedulerInstanceQuery record) { + List instanceList = query(record).getResults(); instanceList = instanceList.stream() .filter(s -> !InstanceStatus.isFinished(s.getStatus())) diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerJobServiceImpl.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerJobServiceImpl.java index 5db872ab1..71a5e1d31 100644 --- 
a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerJobServiceImpl.java +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerJobServiceImpl.java @@ -12,7 +12,9 @@ */ package com.antgroup.openspg.server.core.scheduler.service.metadata.impl.local; +import com.antgroup.openspg.server.api.facade.Paged; import com.antgroup.openspg.server.common.model.exception.SchedulerException; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerJobService; import com.antgroup.openspg.server.core.scheduler.service.utils.SchedulerUtils; @@ -73,7 +75,7 @@ public SchedulerJob getById(Long id) { } @Override - public List query(SchedulerJob record) { + public Paged query(SchedulerJobQuery record) { List jobList = Lists.newArrayList(); for (Long key : jobs.keySet()) { SchedulerJob job = jobs.get(key); @@ -94,6 +96,8 @@ public List query(SchedulerJob record) { BeanUtils.copyProperties(job, target); jobList.add(target); } - return jobList; + Paged paged = new Paged<>(record.getPageSize(), record.getPageNo()); + paged.setResults(jobList); + return paged; } } diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerTaskServiceImpl.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerTaskServiceImpl.java index cf85f6f43..6bb84e95d 100644 --- a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerTaskServiceImpl.java +++ 
b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/metadata/impl/local/LocalSchedulerTaskServiceImpl.java @@ -12,8 +12,10 @@ */ package com.antgroup.openspg.server.core.scheduler.service.metadata.impl.local; +import com.antgroup.openspg.server.api.facade.Paged; import com.antgroup.openspg.server.common.model.exception.SchedulerException; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TaskStatus; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerTaskQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerTaskService; import com.antgroup.openspg.server.core.scheduler.service.utils.SchedulerUtils; @@ -92,7 +94,7 @@ public SchedulerTask getById(Long id) { } @Override - public List query(SchedulerTask record) { + public Paged query(SchedulerTaskQuery record) { List taskList = Lists.newArrayList(); for (Long key : tasks.keySet()) { SchedulerTask task = tasks.get(key); @@ -111,14 +113,16 @@ public List query(SchedulerTask record) { BeanUtils.copyProperties(task, target); taskList.add(target); } - return taskList; + Paged paged = new Paged<>(record.getPageSize(), record.getPageNo()); + paged.setResults(taskList); + return paged; } @Override - public SchedulerTask queryByInstanceIdAndType(Long instanceId, String type) { + public SchedulerTask queryByInstanceIdAndNodeId(Long instanceId, String nodeId) { for (Long key : tasks.keySet()) { SchedulerTask task = tasks.get(key); - if (instanceId.equals(task.getInstanceId()) && type.equalsIgnoreCase(task.getType())) { + if (instanceId.equals(task.getInstanceId()) && nodeId.equalsIgnoreCase(task.getNodeId())) { SchedulerTask target = new SchedulerTask(); BeanUtils.copyProperties(task, target); return target; diff --git 
a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerInfoRepository.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerInfoRepository.java new file mode 100644 index 000000000..113de25a7 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerInfoRepository.java @@ -0,0 +1,49 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.core.scheduler.service.repository; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInfoQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfo; + +/** + * The read-write interface for scheduler info in the database, provides methods for saving, + * updating, deleting, and querying properties and relations. 
+ */ +public interface SchedulerInfoRepository { + + /** insert Info */ + Long insert(SchedulerInfo record); + + /** update By Id */ + Long update(SchedulerInfo record); + + /** delete By Id */ + int deleteById(Long id); + + /** get By id */ + SchedulerInfo getById(Long id); + + /** get By name */ + SchedulerInfo getByName(String name); + + /** query By Condition */ + Paged query(SchedulerInfoQuery record); + + /** update Lock */ + int updateLock(Long id); + + /** update Unlock */ + int updateUnlock(Long id); +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerInstanceRepository.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerInstanceRepository.java new file mode 100644 index 000000000..b2534e80d --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerInstanceRepository.java @@ -0,0 +1,47 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.core.scheduler.service.repository; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; +import java.util.List; + +/** + * The read-write interface for scheduler instance in the database, provides methods for saving, + * updating, deleting, and querying properties and relations. + */ +public interface SchedulerInstanceRepository { + + /** insert Instance */ + Long insert(SchedulerInstance record); + + /** delete By JobId */ + int deleteByJobId(Long jobId); + + /** update */ + Long update(SchedulerInstance record); + + /** get By id */ + SchedulerInstance getById(Long id); + + /** get By uniqueId */ + SchedulerInstance getByUniqueId(String uniqueId); + + /** query By Condition */ + Paged query(SchedulerInstanceQuery record); + + /** get Not Finish Instance */ + List getNotFinishInstance(SchedulerInstanceQuery record); +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerJobRepository.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerJobRepository.java new file mode 100644 index 000000000..f4dd76abb --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerJobRepository.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.core.scheduler.service.repository; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; + +/** + * The read-write interface for scheduler job in the database, provides methods for saving, + * updating, deleting, and querying properties and relations. + */ +public interface SchedulerJobRepository { + + /** insert Job */ + Long insert(SchedulerJob record); + + /** delete By Id */ + int deleteById(Long id); + + /** update Job */ + Long update(SchedulerJob record); + + /** get By id */ + SchedulerJob getById(Long id); + + /** query By Condition */ + Paged query(SchedulerJobQuery record); +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerTaskRepository.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerTaskRepository.java new file mode 100644 index 000000000..a6a864831 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/repository/SchedulerTaskRepository.java @@ -0,0 +1,57 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */

package com.antgroup.openspg.server.core.scheduler.service.repository;

import com.antgroup.openspg.server.api.facade.Paged;
import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum;
import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerTaskQuery;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask;
import java.util.List;

/**
 * Read-write repository for scheduler tasks in the database. Provides methods for inserting,
 * updating, deleting and querying {@link SchedulerTask} records, plus lock helpers used to keep a
 * task from being executed by more than one scheduler thread at a time.
 */
public interface SchedulerTaskRepository {

  /**
   * Inserts a new scheduler task.
   *
   * @param record the task to persist
   * @return the key of the inserted row
   */
  Long insert(SchedulerTask record);

  /**
   * Deletes all tasks belonging to the given job.
   *
   * @return the number of rows deleted
   */
  int deleteByJobId(Long jobId);

  /**
   * Updates an existing task by id.
   *
   * @param record task carrying the id and the fields to change
   * @return the update result count
   */
  Long update(SchedulerTask record);

  /** Gets a task by its primary key. */
  SchedulerTask getById(Long id);

  /** Pages through tasks matching the given query condition. */
  Paged<SchedulerTask> query(SchedulerTaskQuery record);

  /** Gets the task of an instance that corresponds to the given DAG node id. */
  SchedulerTask queryByInstanceIdAndNodeId(Long instanceId, String nodeId);

  /** Lists all tasks of an instance. */
  List<SchedulerTask> queryByInstanceId(Long instanceId);

  /**
   * Sets the status of every task belonging to the given instance.
   *
   * @return the number of rows updated
   */
  int setStatusByInstanceId(Long instanceId, SchedulerEnum.TaskStatus status);

  /**
   * Tries to acquire the execution lock for a task.
   *
   * @return the number of rows updated (0 means the lock was not acquired)
   */
  int updateLock(Long id);

  /**
   * Releases the execution lock for a task.
   *
   * @return the number of rows updated
   */
  int updateUnlock(Long id);
}
a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/TaskExecuteTemplate.java +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/TaskExecuteTemplate.java @@ -27,6 +27,7 @@ import java.util.List; import lombok.extern.slf4j.Slf4j; import org.apache.commons.collections4.CollectionUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.time.DateUtils; import org.springframework.beans.factory.annotation.Autowired; @@ -131,6 +132,9 @@ public void finallyFunc(TaskExecuteContext context) { SchedulerTask old = schedulerTaskService.getById(task.getId()); if (TaskStatus.isFinished(old.getStatus())) { context.addTraceLog("Task has been completed by other threads,status:%s!", old.getStatus()); + if (StringUtils.isBlank(old.getOutput())) { + old.setOutput(task.getOutput()); + } task = old; } @@ -175,8 +179,7 @@ private void startNextNode( return; } SchedulerTask nextTask = - schedulerTaskService.queryByInstanceIdAndType( - task.getInstanceId(), nextNode.getTaskComponent()); + schedulerTaskService.queryByInstanceIdAndNodeId(task.getInstanceId(), nextNode.getId()); SchedulerTask updateTask = new SchedulerTask(); updateTask.setId(nextTask.getId()); String name = nextNode.getName(); @@ -208,8 +211,7 @@ private boolean checkAllTasksFinished(SchedulerTask task, List ta private boolean checkAllNodesFinished(SchedulerTask task, List nodes) { for (TaskExecuteDag.Node node : nodes) { SchedulerTask t = - schedulerTaskService.queryByInstanceIdAndType( - task.getInstanceId(), node.getTaskComponent()); + schedulerTaskService.queryByInstanceIdAndNodeId(task.getInstanceId(), node.getId()); if (!node.getId().equals(task.getNodeId()) && !TaskStatus.isFinished(t.getStatus())) { return false; } diff --git 
a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/ComputingEngineAsyncTask.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/ComputingEngineAsyncTask.java new file mode 100644 index 000000000..4445d209d --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/ComputingEngineAsyncTask.java @@ -0,0 +1,212 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */
package com.antgroup.openspg.server.core.scheduler.service.task.async.builder;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.antgroup.openspg.builder.model.BuilderConstants;
import com.antgroup.openspg.builder.model.record.RecordAlterOperationEnum;
import com.antgroup.openspg.cloudext.interfaces.computingengine.ComputingEngineClient;
import com.antgroup.openspg.cloudext.interfaces.computingengine.ComputingEngineClientDriverManager;
import com.antgroup.openspg.cloudext.interfaces.computingengine.ComputingEngineConstants;
import com.antgroup.openspg.cloudext.interfaces.computingengine.model.ComputingStatusEnum;
import com.antgroup.openspg.cloudext.interfaces.computingengine.model.ComputingTask;
import com.antgroup.openspg.server.common.model.bulider.BuilderJob;
import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum;
import com.antgroup.openspg.server.common.service.builder.BuilderJobService;
import com.antgroup.openspg.server.common.service.config.DefaultValue;
import com.antgroup.openspg.server.common.service.project.ProjectService;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask;
import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteContext;
import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerInstanceService;
import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerTaskService;
import com.antgroup.openspg.server.core.scheduler.service.task.async.AsyncTaskExecuteTemplate;
import java.util.Date;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;

/**
 * Async scheduler task that submits a builder job to the remote computing engine and then polls
 * the engine until the job reaches a terminal state, resubmitting it when the engine reports the
 * job as failed or missing.
 */
@Component("computingEngineAsyncTask")
public class ComputingEngineAsyncTask extends AsyncTaskExecuteTemplate {

  /** Resubmit the engine job on every Nth consecutive FAILED/STOP poll. */
  private static final int FAILED_RESUBMIT_INTERVAL = 10;

  /** Resubmit the engine job on every Nth consecutive NOTFOUND poll. */
  private static final int NOT_FOUND_RESUBMIT_INTERVAL = 5;

  @Autowired private DefaultValue value;

  @Autowired private BuilderJobService builderJobService;

  @Autowired private SchedulerInstanceService instanceService;

  @Autowired private SchedulerTaskService taskService;

  @Autowired private ProjectService projectManager;

  /**
   * Submits the builder job referenced by the scheduler job to the computing engine.
   *
   * @param context the scheduler execution context carrying the job
   * @return the engine-side task id, used later to poll status
   */
  @Override
  public String submit(TaskExecuteContext context) {
    SchedulerJob job = context.getJob();
    BuilderJob builderJob = builderJobService.getById(Long.valueOf(job.getInvokerId()));
    ComputingEngineClient client =
        ComputingEngineClientDriverManager.getClient(value.getComputingEngineUrl());
    context.addTraceLog("Start assembling the computing engine configuration information");
    JSONObject extension = initExtension(builderJob);
    long startTime = System.currentTimeMillis();
    ComputingTask computingTask = client.submitBuilderJob(builderJob, extension);
    long time = System.currentTimeMillis() - startTime;
    context.addTraceLog(
        "submit builder job succeed. resource:%s, cost:%s ms, details:%s",
        computingTask.getTaskId(), time, computingTask.getLogUrl());
    return computingTask.getTaskId();
  }

  /**
   * Puts {@code defaultValue} under {@code key} only when the key is missing or mapped to null.
   *
   * <p>A null {@code extension} is a no-op: reassigning the parameter inside this method could
   * never be observed by the caller (Java passes references by value), so callers must supply a
   * non-null object — {@link #initExtension(BuilderJob)} guarantees that.
   */
  public static void putOrDefault(JSONObject extension, String key, Object defaultValue) {
    if (extension == null) {
      return;
    }
    // JSONObject.get returns null both for a missing key and for an explicit null value.
    if (extension.get(key) == null) {
      extension.put(key, defaultValue);
    }
  }

  /**
   * Builds the engine extension configuration from the job's computing conf, filling in a default
   * for every option the conf does not set.
   */
  public JSONObject initExtension(BuilderJob builderJob) {
    JSONObject extension = JSONObject.parseObject(builderJob.getComputingConf());
    if (extension == null) {
      // parseObject returns null for a null/empty conf; start from an empty object so the
      // defaults below are actually applied instead of being silently dropped.
      extension = new JSONObject();
    }
    putOrDefault(extension, BuilderConstants.PYTHON_EXEC_OPTION, value.getPythonExec());
    putOrDefault(extension, BuilderConstants.PYTHON_PATHS_OPTION, value.getPythonPaths());
    putOrDefault(extension, BuilderConstants.SCHEMA_URL_OPTION, value.getSchemaUrlHost());
    putOrDefault(extension, BuilderConstants.PARALLELISM_OPTION, 1);
    String jobAction =
        (builderJob.getAction() != null)
            ? builderJob.getAction()
            : RecordAlterOperationEnum.UPSERT.name();
    putOrDefault(extension, BuilderConstants.ALTER_OPERATION_OPTION, jobAction);
    putOrDefault(extension, BuilderConstants.LEAD_TO_OPTION, false);
    putOrDefault(extension, BuilderConstants.GRAPH_STORE_URL_OPTION, value.getGraphStoreUrl());
    putOrDefault(extension, BuilderConstants.SEARCH_ENGINE_URL_OPTION, value.getSearchEngineUrl());
    putOrDefault(extension, BuilderConstants.MODEL_EXECUTE_NUM_OPTION, value.getModelExecuteNum());
    putOrDefault(
        extension,
        BuilderConstants.PROJECT_OPTION,
        JSON.toJSONString(projectManager.queryById(builderJob.getProjectId())));
    return extension;
  }

  /**
   * Polls the engine for the status of {@code resource} and maps it to a scheduler task status.
   */
  @Override
  public SchedulerEnum.TaskStatus getStatus(TaskExecuteContext context, String resource) {
    ComputingEngineClient client =
        ComputingEngineClientDriverManager.getClient(value.getComputingEngineUrl());
    context.addTraceLog("Get the computing engine task status based on taskId:%s", resource);
    ComputingStatusEnum statusEnum = client.queryStatus(new JSONObject(), resource);
    context.addTraceLog(
        "The computing engine status was obtained successfully. The status is:%s",
        statusEnum.name());
    switch (statusEnum) {
      case RUNNING:
        return processByRunning(context);
      case SUCCESS:
        return processByFinished(context, resource);
      case FAILED:
      case STOP:
        return processByFailed(context, resource);
      case NOTFOUND:
        return processNotFound(context, resource);
      default:
        context.addTraceLog(
            "The computing engine status is: %s, no operation is performed", statusEnum.name());
        break;
    }
    return SchedulerEnum.TaskStatus.RUNNING;
  }

  /**
   * Handles the RUNNING engine status. Real-time instances finish scheduling immediately (the
   * engine job keeps running on its own); batch instances keep waiting.
   */
  public SchedulerEnum.TaskStatus processByRunning(TaskExecuteContext context) {
    SchedulerInstance kgJobInstance = context.getInstance();
    SchedulerInstance updateInstance = new SchedulerInstance();
    updateInstance.setId(kgJobInstance.getId());
    updateInstance.setStatus(SchedulerEnum.InstanceStatus.RUNNING);
    instanceService.update(updateInstance);
    if (SchedulerEnum.LifeCycle.REAL_TIME.equals(kgJobInstance.getLifeCycle())) {
      context.addTraceLog(
          "The current build task is a real-time task, and the computing engine task is running continuously. Task scheduling "
              + "has been completed!");
      taskService.setStatusByInstanceId(kgJobInstance.getId(), SchedulerEnum.TaskStatus.SKIP);
      return SchedulerEnum.TaskStatus.FINISH;
    } else {
      context.addTraceLog(
          "The computing engine task is still running. Please wait for the execution to complete.");
      return SchedulerEnum.TaskStatus.RUNNING;
    }
  }

  /**
   * Handles the FAILED/STOP engine status: reports an error, and on every Nth failed poll
   * automatically resubmits the engine job.
   */
  public SchedulerEnum.TaskStatus processByFailed(TaskExecuteContext context, String resource) {
    SchedulerTask task = context.getTask();
    context.addTraceLog(
        "The computing engine task failed to run. Please check the task log. taskId:%s", resource);
    if (task.getExecuteNum() % FAILED_RESUBMIT_INTERVAL == 0) {
      context.addTraceLog(
          "The computing engine task has been in a failed state. The program automatically resubmits the task");
      String taskId = submit(context);
      context.addTraceLog("The computing engine task resubmit successful! taskId:%s", taskId);
      SchedulerTask updateTask = new SchedulerTask();
      updateTask.setId(task.getId());
      updateTask.setResource(taskId);
      taskService.update(updateTask);
      return SchedulerEnum.TaskStatus.RUNNING;
    }
    return SchedulerEnum.TaskStatus.ERROR;
  }

  /**
   * Handles the NOTFOUND engine status: waits for a few polls (the job may still be being
   * submitted), then resubmits the engine job as a recovery mechanism.
   */
  public SchedulerEnum.TaskStatus processNotFound(TaskExecuteContext context, String resource) {
    SchedulerTask task = context.getTask();
    if (task.getExecuteNum() % NOT_FOUND_RESUBMIT_INTERVAL != 0) {
      context.addTraceLog(
          "The computing engine task is being submitted, taskId: %s. Waiting for the next scheduling",
          resource);
      return SchedulerEnum.TaskStatus.ERROR;
    }
    context.addTraceLog(
        "The computing engine task has been manually offline or deleted. Automatic execution recovery mechanism");
    String taskId = submit(context);
    context.addTraceLog("The computing engine task resubmit successful! taskId:%s", taskId);
    SchedulerTask updateTask = new SchedulerTask();
    updateTask.setId(task.getId());
    updateTask.setResource(taskId);
    taskService.update(updateTask);
    return SchedulerEnum.TaskStatus.RUNNING;
  }

  /** Handles the SUCCESS engine status: marks the instance finished with 100% progress. */
  public SchedulerEnum.TaskStatus processByFinished(TaskExecuteContext context, String resource) {
    SchedulerInstance instance = context.getInstance();
    SchedulerInstance updateInstance = new SchedulerInstance();
    updateInstance.setId(instance.getId());
    updateInstance.setFinishTime(new Date());
    updateInstance.setProgress(100L);
    Long count = instanceService.update(updateInstance);
    Assert.isTrue(
        count > 0, "Data update failed, instance: " + JSONObject.toJSONString(updateInstance));
    context.addTraceLog("All calculation engine tasks have been completed! taskId:%s", resource);
    return SchedulerEnum.TaskStatus.FINISH;
  }

  /** Asks the computing engine to stop the job identified by {@code resource}. */
  @Override
  public Boolean stop(TaskExecuteContext context, String resource) {
    ComputingEngineClient client =
        ComputingEngineClientDriverManager.getClient(value.getComputingEngineUrl());
    SchedulerJob job = context.getJob();
    BuilderJob builderJob = builderJobService.getById(Long.valueOf(job.getInvokerId()));
    JSONObject extension = new JSONObject();
    extension.put(ComputingEngineConstants.USER_NUMBER, builderJob.getModifyUser());
    return client.stop(extension, resource);
  }
}
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */
package com.antgroup.openspg.server.core.scheduler.service.task.async.builder;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.antgroup.openspg.builder.model.record.SubGraphRecord;
import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient;
import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriverManager;
import com.antgroup.openspg.common.constants.BuilderConstant;
import com.antgroup.openspg.common.util.CommonUtils;
import com.antgroup.openspg.common.util.StringUtils;
import com.antgroup.openspg.common.util.pemja.PemjaUtils;
import com.antgroup.openspg.common.util.pemja.PythonInvokeMethod;
import com.antgroup.openspg.common.util.pemja.model.PemjaConfig;
import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum;
import com.antgroup.openspg.server.common.service.config.DefaultValue;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask;
import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteContext;
import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteDag;
import com.antgroup.openspg.server.core.scheduler.service.common.MemoryTaskServer;
import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerTaskService;
import com.antgroup.openspg.server.core.scheduler.service.task.async.AsyncTaskExecuteTemplate;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Async scheduler task that runs the KAG knowledge-alignment post-processing step on an in-memory
 * worker: it loads the sub-graphs produced by the upstream tasks from object storage, pushes them
 * through the Python alignment component via pemja, and stores the aligned result back to object
 * storage. The resulting storage key becomes this task's output.
 */
@Component("kagAlignmentAsyncTask")
public class KagAlignmentAsyncTask extends AsyncTaskExecuteTemplate {

  /** Resubmit the in-memory task on every Nth consecutive ERROR poll. */
  private static final int ERROR_RESUBMIT_INTERVAL = 3;

  @Autowired private DefaultValue value;

  @Autowired private MemoryTaskServer memoryTaskServer;

  @Autowired private SchedulerTaskService taskService;

  /**
   * Submits the alignment work to the in-memory task server.
   *
   * <p>The task storage file key doubles as the unique id of the in-memory task, which makes this
   * method idempotent: a second submit for the same scheduler task reuses the running worker.
   *
   * @return the in-memory task id used to poll status
   */
  @Override
  public String submit(TaskExecuteContext context) {
    SchedulerInstance instance = context.getInstance();
    SchedulerTask task = context.getTask();
    String key =
        CommonUtils.getTaskStorageFileKey(
            task.getProjectId(), task.getInstanceId(), task.getId(), task.getType());
    SchedulerTask memoryTask = memoryTaskServer.getTask(key);
    if (memoryTask != null) {
      context.addTraceLog("Alignment task already exists; reuse it");
      return memoryTask.getNodeId();
    }

    List<String> inputs = getInputs(instance, task);
    return memoryTaskServer.submit(new AlignmentTaskCallable(value, context, inputs), key);
  }

  /** Collects the object-storage keys written by the finished predecessor tasks in the DAG. */
  private List<String> getInputs(SchedulerInstance instance, SchedulerTask task) {
    List<TaskExecuteDag.Node> nodes =
        instance.getTaskDag().getRelatedNodes(task.getNodeId(), false);
    List<String> inputs = Lists.newArrayList();
    nodes.forEach(
        node -> {
          SchedulerTask preTask =
              taskService.queryByInstanceIdAndNodeId(task.getInstanceId(), node.getId());
          if (preTask != null && StringUtils.isNotBlank(preTask.getOutput())) {
            inputs.add(preTask.getOutput());
          }
        });
    return inputs;
  }

  /**
   * Polls the in-memory task and maps it to a scheduler task status. A vanished task (e.g. after a
   * server restart) and every Nth ERROR poll trigger a transparent resubmit.
   */
  @Override
  public SchedulerEnum.TaskStatus getStatus(TaskExecuteContext context, String resource) {
    SchedulerTask task = memoryTaskServer.getTask(resource);
    SchedulerTask schedulerTask = context.getTask();
    if (task == null) {
      // In-memory tasks do not survive a restart; recover by resubmitting.
      context.addTraceLog("Alignment task(%s) not found, resubmit", resource);
      submit(context);
      context.addTraceLog("Async task resubmit successful!");
      return SchedulerEnum.TaskStatus.RUNNING;
    }
    context.addTraceLog("Alignment task status is %s", task.getStatus());
    if (StringUtils.isNotBlank(task.getTraceLog())) {
      // Forward the worker's accumulated trace log, then clear it to avoid duplication next poll.
      context.addTraceLog(
          "Alignment task traceLog:%s%s", System.getProperty("line.separator"), task.getTraceLog());
      task.setTraceLog("");
    }
    switch (task.getStatus()) {
      case RUNNING:
        break;
      case ERROR:
        if (schedulerTask.getExecuteNum() % ERROR_RESUBMIT_INTERVAL == 0) {
          context.addTraceLog("Alignment task(%s) status is ERROR, resubmit", resource);
          memoryTaskServer.stopTask(resource);
          submit(context);
          context.addTraceLog("Async task resubmit successful!");
          return SchedulerEnum.TaskStatus.RUNNING;
        }
        break;
      case FINISH:
        // resource equals the storage key written by the worker; expose it as this task's output.
        memoryTaskServer.stopTask(resource);
        schedulerTask.setOutput(resource);
        removeInputs(context);
        break;
      default:
        context.addTraceLog("Alignment Task Status is %s. Do nothing", task.getStatus());
        break;
    }
    return task.getStatus();
  }

  /** Deletes the upstream intermediate files from object storage once alignment has finished. */
  public void removeInputs(TaskExecuteContext context) {
    SchedulerInstance instance = context.getInstance();
    SchedulerTask task = context.getTask();
    List<String> inputs = getInputs(instance, task);
    ObjectStorageClient objectStorageClient =
        ObjectStorageClientDriverManager.getClient(value.getObjectStorageUrl());
    for (String input : inputs) {
      objectStorageClient.removeObject(value.getBuilderBucketName(), input);
    }
  }

  /** Stops the in-memory worker identified by {@code resource}. */
  @Override
  public Boolean stop(TaskExecuteContext context, String resource) {
    return memoryTaskServer.stopTask(resource);
  }

  /**
   * Worker that performs the alignment off the scheduler thread. (Renamed from the copy-pasted
   * "VectorizerTaskCallable" to match what it actually does.)
   */
  private static class AlignmentTaskCallable extends MemoryTaskServer.MemoryTaskCallable<String> {

    /** ObjectMapper is thread-safe and expensive to create; share one instance. */
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    private final DefaultValue value;

    private final ObjectStorageClient objectStorageClient;

    private final TaskExecuteContext context;

    private final List<String> inputs;

    public AlignmentTaskCallable(
        DefaultValue value, TaskExecuteContext context, List<String> inputs) {
      this.value = value;
      this.objectStorageClient =
          ObjectStorageClientDriverManager.getClient(value.getObjectStorageUrl());
      this.context = context;
      this.inputs = inputs;
    }

    /**
     * Loads every input sub-graph list from object storage, aligns it, and writes the combined
     * result back to object storage.
     *
     * @return the storage key of the aligned result
     */
    @Override
    public String call() throws Exception {
      List<SubGraphRecord> subGraphList = Lists.newArrayList();
      addTraceLog("Start Alignment task...");
      for (String input : inputs) {
        String data = objectStorageClient.getString(value.getBuilderBucketName(), input);
        List<SubGraphRecord> subGraphs =
            JSON.parseObject(data, new TypeReference<List<SubGraphRecord>>() {});
        subGraphList.addAll(alignment(context, subGraphs));
      }
      addTraceLog("Alignment task complete...");
      SchedulerTask task = context.getTask();
      String fileKey =
          CommonUtils.getTaskStorageFileKey(
              task.getProjectId(), task.getInstanceId(), task.getId(), task.getType());
      objectStorageClient.saveString(
          value.getBuilderBucketName(), JSON.toJSONString(subGraphList), fileKey);
      addTraceLog(
          "Alignment result is stored bucket:%s file:%s", value.getBuilderBucketName(), fileKey);
      return fileKey;
    }

    /** Invokes the Python alignment component once per sub-graph and gathers the results. */
    public List<SubGraphRecord> alignment(
        TaskExecuteContext context, List<SubGraphRecord> subGraphs) {
      List<SubGraphRecord> subGraphList = Lists.newArrayList();
      Long projectId = context.getInstance().getProjectId();
      PythonInvokeMethod alignment = PythonInvokeMethod.BRIDGE_COMPONENT;
      JSONObject pyConfig = new JSONObject();
      pyConfig.put(BuilderConstant.TYPE, BuilderConstant.BASE);

      PemjaConfig pemjaConfig =
          new PemjaConfig(
              value.getPythonExec(),
              value.getPythonPaths(),
              value.getSchemaUrlHost(),
              projectId,
              alignment,
              Maps.newHashMap());
      for (SubGraphRecord subGraph : subGraphs) {
        addTraceLog("invoke alignment processor operator:%s", pemjaConfig.getClassName());
        Map map = OBJECT_MAPPER.convertValue(subGraph, Map.class);
        List result =
            (List)
                PemjaUtils.invoke(
                    pemjaConfig, BuilderConstant.POSTPROCESSOR_ABC, pyConfig.toJSONString(), map);
        List<SubGraphRecord> records =
            JSON.parseObject(
                JSON.toJSONString(result), new TypeReference<List<SubGraphRecord>>() {});
        subGraphList.addAll(records);
        for (SubGraphRecord subGraphRecord : records) {
          addTraceLog(
              "alignment processor succeed node:%s edge:%s",
              subGraphRecord.getResultNodes().size(), subGraphRecord.getResultEdges().size());
        }
      }
      return subGraphList;
    }
  }
}
b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagExtractorAsyncTask.java new file mode 100644 index 000000000..91e72d2d9 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagExtractorAsyncTask.java @@ -0,0 +1,280 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.service.task.async.builder; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.TypeReference; +import com.antgroup.openspg.builder.model.record.ChunkRecord; +import com.antgroup.openspg.builder.model.record.SubGraphRecord; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriverManager; +import com.antgroup.openspg.common.constants.BuilderConstant; +import com.antgroup.openspg.common.util.CommonUtils; +import com.antgroup.openspg.common.util.StringUtils; +import com.antgroup.openspg.common.util.pemja.PemjaUtils; +import com.antgroup.openspg.common.util.pemja.PythonInvokeMethod; +import com.antgroup.openspg.common.util.pemja.model.PemjaConfig; +import com.antgroup.openspg.server.common.model.CommonConstants; +import com.antgroup.openspg.server.common.model.project.Project; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum; +import 
com.antgroup.openspg.server.common.service.config.DefaultValue; +import com.antgroup.openspg.server.common.service.project.ProjectService; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; +import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteContext; +import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteDag; +import com.antgroup.openspg.server.core.scheduler.service.common.MemoryTaskServer; +import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerTaskService; +import com.antgroup.openspg.server.core.scheduler.service.task.async.AsyncTaskExecuteTemplate; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.RejectedExecutionHandler; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import javax.annotation.PostConstruct; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component("kagExtractorAsyncTask") +public class KagExtractorAsyncTask extends AsyncTaskExecuteTemplate { + + private static final RejectedExecutionHandler handler = + (r, executor) -> { + try { + executor.getQueue().put(r); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + }; + + private static ThreadPoolExecutor executor; + + @Autowired private DefaultValue value; + + @Autowired private MemoryTaskServer memoryTaskServer; + + @Autowired private SchedulerTaskService taskService; + + @Autowired private ProjectService projectService; + + 
@PostConstruct + public void init() { + if (executor == null) { + executor = + new ThreadPoolExecutor( + value.getModelExecuteNum(), + value.getModelExecuteNum(), + 60 * 60, + TimeUnit.SECONDS, + new LinkedBlockingQueue<>(100), + handler); + } + } + + @Override + public String submit(TaskExecuteContext context) { + SchedulerInstance instance = context.getInstance(); + SchedulerTask task = context.getTask(); + String key = + CommonUtils.getTaskStorageFileKey( + task.getProjectId(), task.getInstanceId(), task.getId(), task.getType()); + SchedulerTask memoryTask = memoryTaskServer.getTask(key); + if (memoryTask != null) { + context.addTraceLog("Extractor task already exists; reuse it"); + return memoryTask.getNodeId(); + } + + List nodes = + instance.getTaskDag().getRelatedNodes(task.getNodeId(), false); + List inputs = Lists.newArrayList(); + nodes.forEach( + node -> { + SchedulerTask preTask = + taskService.queryByInstanceIdAndNodeId(task.getInstanceId(), node.getId()); + if (preTask != null && StringUtils.isNotBlank(preTask.getOutput())) { + inputs.add(preTask.getOutput()); + } + }); + Project project = projectService.queryById(instance.getProjectId()); + String taskId = + memoryTaskServer.submit(new ExtractorTaskCallable(value, project, context, inputs), key); + return taskId; + } + + @Override + public SchedulerEnum.TaskStatus getStatus(TaskExecuteContext context, String resource) { + SchedulerTask task = memoryTaskServer.getTask(resource); + SchedulerTask schedulerTask = context.getTask(); + if (task == null) { + context.addTraceLog("Extractor task(%s) not found, resubmit", resource); + submit(context); + context.addTraceLog("Async task resubmit successful!"); + return SchedulerEnum.TaskStatus.RUNNING; + } + context.addTraceLog("Extractor task status is %s", task.getStatus()); + if (StringUtils.isNotBlank(task.getTraceLog())) { + context.addTraceLog( + "Extractor task traceLog:%s%s", System.getProperty("line.separator"), task.getTraceLog()); + 
task.setTraceLog(""); + } + switch (task.getStatus()) { + case RUNNING: + break; + case ERROR: + int retryNum = 3; + if (schedulerTask.getExecuteNum() % retryNum == 0) { + context.addTraceLog("Extractor task(%s) status is ERROR, resubmit", resource); + memoryTaskServer.stopTask(resource); + submit(context); + context.addTraceLog("Async task resubmit successful!"); + return SchedulerEnum.TaskStatus.RUNNING; + } + break; + case FINISH: + String fileKey = + CommonUtils.getTaskStorageFileKey( + schedulerTask.getProjectId(), + schedulerTask.getInstanceId(), + schedulerTask.getId(), + schedulerTask.getType()); + memoryTaskServer.stopTask(resource); + schedulerTask.setOutput(fileKey); + break; + default: + context.addTraceLog("Extractor Task Status is %s. Do nothing", task.getStatus()); + break; + } + return task.getStatus(); + } + + @Override + public Boolean stop(TaskExecuteContext context, String resource) { + return memoryTaskServer.stopTask(resource); + } + + private static class ExtractorTaskCallable extends MemoryTaskServer.MemoryTaskCallable { + + private DefaultValue value; + + private ObjectStorageClient objectStorageClient; + + private TaskExecuteContext context; + + private List inputs; + + private Project project; + + public ExtractorTaskCallable( + DefaultValue value, Project project, TaskExecuteContext context, List inputs) { + this.value = value; + this.objectStorageClient = + ObjectStorageClientDriverManager.getClient(value.getObjectStorageUrl()); + this.context = context; + this.inputs = inputs; + this.project = project; + } + + @Override + public String call() throws Exception { + List chunkList = Lists.newArrayList(); + for (String input : inputs) { + String data = objectStorageClient.getString(value.getBuilderBucketName(), input); + List chunks = + JSON.parseObject(data, new TypeReference>() {}); + chunkList.addAll(chunks); + } + addTraceLog("Start extract document chunk. 
chunk size:%s", chunkList.size()); + + List>> futures = new ArrayList<>(); + List results = new ArrayList<>(); + for (ChunkRecord.Chunk chunk : chunkList) { + Future> future = + executor.submit(new ExtractTaskCallable(chunk, value, project)); + futures.add(future); + } + + for (Future> future : futures) { + try { + List result = future.get(); + results.addAll(result); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException("invoke extract Exception", e); + } + } + addTraceLog("extract document complete."); + + SchedulerTask task = context.getTask(); + String fileKey = + CommonUtils.getTaskStorageFileKey( + task.getProjectId(), task.getInstanceId(), task.getId(), task.getType()); + objectStorageClient.saveString( + value.getBuilderBucketName(), JSON.toJSONString(results), fileKey); + addTraceLog( + "extract result is stored bucket:%s file:%s", value.getBuilderBucketName(), fileKey); + return fileKey; + } + + class ExtractTaskCallable implements Callable> { + private final ChunkRecord.Chunk chunk; + private final DefaultValue value; + private final Project project; + + public ExtractTaskCallable(ChunkRecord.Chunk chunk, DefaultValue value, Project project) { + this.chunk = chunk; + this.value = value; + this.project = project; + } + + @Override + public List call() throws Exception { + PythonInvokeMethod extractor = PythonInvokeMethod.BRIDGE_COMPONENT; + String projectConfig = project.getConfig(); + JSONObject llm = JSONObject.parseObject(projectConfig).getJSONObject(CommonConstants.LLM); + JSONObject pyConfig = new JSONObject(); + pyConfig.put(BuilderConstant.TYPE, BuilderConstant.SCHEMA_FREE); + pyConfig.put(BuilderConstant.LLM, llm); + PemjaConfig pemjaConfig = + new PemjaConfig( + value.getPythonExec(), + value.getPythonPaths(), + value.getSchemaUrlHost(), + project.getId(), + extractor, + Maps.newHashMap()); + addTraceLog("invoke extractor chunk:%s", chunk.getName()); + Map map = new ObjectMapper().convertValue(chunk, Map.class); + 
List result = + (List) + PemjaUtils.invoke( + pemjaConfig, BuilderConstant.EXTRACTOR_ABC, pyConfig.toJSONString(), map); + List records = + JSON.parseObject( + JSON.toJSONString(result), new TypeReference>() {}); + // com.antgroup.openspg.builder.core.physical.utils.CommonUtils.addLabelPrefix(project.getNamespace(), records); + addTraceLog( + "invoke extract operator:%s chunk:%s succeed", + pemjaConfig.getClassName(), chunk.getName()); + return records; + } + } + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagSplitterAsyncTask.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagSplitterAsyncTask.java new file mode 100644 index 000000000..0eca748a3 --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagSplitterAsyncTask.java @@ -0,0 +1,228 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.service.task.async.builder; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.TypeReference; +import com.antgroup.openspg.builder.model.record.ChunkRecord; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriverManager; +import com.antgroup.openspg.common.constants.BuilderConstant; +import com.antgroup.openspg.common.util.CommonUtils; +import com.antgroup.openspg.common.util.StringUtils; +import com.antgroup.openspg.common.util.pemja.PemjaUtils; +import com.antgroup.openspg.common.util.pemja.model.PemjaConfig; +import com.antgroup.openspg.server.common.model.bulider.BuilderJob; +import com.antgroup.openspg.server.common.model.project.Project; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum; +import com.antgroup.openspg.server.common.service.builder.BuilderJobService; +import com.antgroup.openspg.server.common.service.config.DefaultValue; +import com.antgroup.openspg.server.common.service.project.ProjectService; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; +import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteContext; +import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteDag; +import com.antgroup.openspg.server.core.scheduler.service.common.MemoryTaskServer; +import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerTaskService; +import com.antgroup.openspg.server.core.scheduler.service.task.async.AsyncTaskExecuteTemplate; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; +import java.util.List; +import java.util.Map; 
+import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component("kagSplitterAsyncTask") +public class KagSplitterAsyncTask extends AsyncTaskExecuteTemplate { + + @Autowired private DefaultValue value; + + @Autowired private BuilderJobService builderJobService; + + @Autowired private MemoryTaskServer memoryTaskServer; + + @Autowired private SchedulerTaskService taskService; + + @Autowired private ProjectService projectService; + + @Override + public String submit(TaskExecuteContext context) { + SchedulerInstance instance = context.getInstance(); + SchedulerTask task = context.getTask(); + String key = + CommonUtils.getTaskStorageFileKey( + task.getProjectId(), task.getInstanceId(), task.getId(), task.getType()); + SchedulerTask memoryTask = memoryTaskServer.getTask(key); + if (memoryTask != null) { + context.addTraceLog("Splitter task already exists; reuse it"); + return memoryTask.getNodeId(); + } + + List nodes = + instance.getTaskDag().getRelatedNodes(task.getNodeId(), false); + List inputs = Lists.newArrayList(); + nodes.forEach( + node -> { + SchedulerTask preTask = + taskService.queryByInstanceIdAndNodeId(task.getInstanceId(), node.getId()); + if (preTask != null && StringUtils.isNotBlank(preTask.getOutput())) { + inputs.add(preTask.getOutput()); + } + }); + String taskId = + memoryTaskServer.submit( + new SplitterTaskCallable(value, builderJobService, projectService, context, inputs), + key); + return taskId; + } + + @Override + public SchedulerEnum.TaskStatus getStatus(TaskExecuteContext context, String resource) { + SchedulerTask task = memoryTaskServer.getTask(resource); + SchedulerTask schedulerTask = context.getTask(); + if (task == null) { + context.addTraceLog("Splitter task(%s) not found, resubmit", resource); + submit(context); + context.addTraceLog("Async task resubmit successful!"); + return SchedulerEnum.TaskStatus.RUNNING; + } + context.addTraceLog("Splitter task status is %s", 
task.getStatus()); + if (StringUtils.isNotBlank(task.getTraceLog())) { + context.addTraceLog( + "Splitter task traceLog:%s%s", System.getProperty("line.separator"), task.getTraceLog()); + task.setTraceLog(""); + } + switch (task.getStatus()) { + case RUNNING: + break; + case ERROR: + int retryNum = 3; + if (schedulerTask.getExecuteNum() % retryNum == 0) { + context.addTraceLog("Splitter task(%s) status is ERROR, resubmit", resource); + memoryTaskServer.stopTask(resource); + submit(context); + context.addTraceLog("Async task resubmit successful!"); + return SchedulerEnum.TaskStatus.RUNNING; + } + break; + case FINISH: + String fileKey = + CommonUtils.getTaskStorageFileKey( + schedulerTask.getProjectId(), + schedulerTask.getInstanceId(), + schedulerTask.getId(), + schedulerTask.getType()); + memoryTaskServer.stopTask(resource); + schedulerTask.setOutput(fileKey); + break; + default: + context.addTraceLog("Splitter Task Status is %s. Do nothing", task.getStatus()); + break; + } + return task.getStatus(); + } + + @Override + public Boolean stop(TaskExecuteContext context, String resource) { + return memoryTaskServer.stopTask(resource); + } + + private static class SplitterTaskCallable extends MemoryTaskServer.MemoryTaskCallable { + + private DefaultValue value; + + private BuilderJobService builderJobService; + + private ProjectService projectService; + + private ObjectStorageClient objectStorageClient; + + private TaskExecuteContext context; + + private List inputs; + + public SplitterTaskCallable( + DefaultValue value, + BuilderJobService builderJobService, + ProjectService projectService, + TaskExecuteContext context, + List inputs) { + this.value = value; + this.builderJobService = builderJobService; + this.projectService = projectService; + this.objectStorageClient = + ObjectStorageClientDriverManager.getClient(value.getObjectStorageUrl()); + this.context = context; + this.inputs = inputs; + } + + @Override + public String call() throws Exception { + List 
chunkList = Lists.newArrayList(); + addTraceLog("Start split document..."); + for (String input : inputs) { + String data = objectStorageClient.getString(value.getBuilderBucketName(), input); + List chunks = + JSON.parseObject(data, new TypeReference>() {}); + chunkList.addAll(splitterChunk(context, chunks)); + } + addTraceLog("Split document complete. number of paragraphs:%s", chunkList.size()); + SchedulerTask task = context.getTask(); + String fileKey = + CommonUtils.getTaskStorageFileKey( + task.getProjectId(), task.getInstanceId(), task.getId(), task.getType()); + objectStorageClient.saveString( + value.getBuilderBucketName(), JSON.toJSONString(chunkList), fileKey); + addTraceLog( + "split result is stored bucket:%s file:%s", value.getBuilderBucketName(), fileKey); + return fileKey; + } + + public List splitterChunk( + TaskExecuteContext context, List chunks) { + List chunkList = Lists.newArrayList(); + JSONObject pyConfig = new JSONObject(); + Project project = projectService.queryById(context.getInstance().getProjectId()); + SchedulerJob job = context.getJob(); + BuilderJob builderJob = builderJobService.getById(Long.valueOf(job.getInvokerId())); + JSONObject extension = JSON.parseObject(builderJob.getExtension()); + PemjaConfig pemjaConfig = + com.antgroup.openspg.builder.core.physical.utils.CommonUtils.getSplitterConfig( + pyConfig, + value.getPythonExec(), + value.getPythonPaths(), + value.getSchemaUrlHost(), + project, + extension); + addTraceLog("invoke split operator:%s", pemjaConfig.getClassName()); + for (ChunkRecord.Chunk chunk : chunks) { + addTraceLog("invoke split chunk:%s", chunk.getName()); + Map map = new ObjectMapper().convertValue(chunk, Map.class); + List result = + (List) + PemjaUtils.invoke( + pemjaConfig, BuilderConstant.SPLITTER_ABC, pyConfig.toJSONString(), map); + List datas = + JSON.parseObject( + JSON.toJSONString(result), new TypeReference>() {}); + chunkList.addAll(datas); + addTraceLog("invoke split chunk:%s size:%s succeed", 
chunk.getName(), chunkList.size()); + } + addTraceLog("invoke split operator:%s succeed", pemjaConfig.getClassName()); + + return chunkList; + } + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagVectorizerAsyncTask.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagVectorizerAsyncTask.java new file mode 100644 index 000000000..0a171e7ef --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagVectorizerAsyncTask.java @@ -0,0 +1,225 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.core.scheduler.service.task.async.builder; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.TypeReference; +import com.antgroup.openspg.builder.model.record.SubGraphRecord; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriverManager; +import com.antgroup.openspg.common.constants.BuilderConstant; +import com.antgroup.openspg.common.util.CommonUtils; +import com.antgroup.openspg.common.util.StringUtils; +import com.antgroup.openspg.common.util.pemja.PemjaUtils; +import com.antgroup.openspg.common.util.pemja.PythonInvokeMethod; +import com.antgroup.openspg.common.util.pemja.model.PemjaConfig; +import com.antgroup.openspg.server.common.model.CommonConstants; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum; +import com.antgroup.openspg.server.common.service.config.DefaultValue; +import com.antgroup.openspg.server.common.service.project.ProjectService; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; +import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteContext; +import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteDag; +import com.antgroup.openspg.server.core.scheduler.service.common.MemoryTaskServer; +import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerTaskService; +import com.antgroup.openspg.server.core.scheduler.service.task.async.AsyncTaskExecuteTemplate; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import java.util.List; +import java.util.Map; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + 
+@Component("kagVectorizerAsyncTask") +public class KagVectorizerAsyncTask extends AsyncTaskExecuteTemplate { + + @Autowired private DefaultValue value; + + @Autowired private MemoryTaskServer memoryTaskServer; + + @Autowired private SchedulerTaskService taskService; + + @Autowired private ProjectService projectService; + + @Override + public String submit(TaskExecuteContext context) { + SchedulerInstance instance = context.getInstance(); + SchedulerTask task = context.getTask(); + String key = + CommonUtils.getTaskStorageFileKey( + task.getProjectId(), task.getInstanceId(), task.getId(), task.getType()); + SchedulerTask memoryTask = memoryTaskServer.getTask(key); + if (memoryTask != null) { + context.addTraceLog("Vectorizer task already exists; reuse it"); + return memoryTask.getNodeId(); + } + + List inputs = getInputs(instance, task); + String taskId = + memoryTaskServer.submit( + new VectorizerTaskCallable(value, projectService, context, inputs), key); + return taskId; + } + + private List getInputs(SchedulerInstance instance, SchedulerTask task) { + List nodes = + instance.getTaskDag().getRelatedNodes(task.getNodeId(), false); + List inputs = Lists.newArrayList(); + nodes.forEach( + node -> { + SchedulerTask preTask = + taskService.queryByInstanceIdAndNodeId(task.getInstanceId(), node.getId()); + if (preTask != null && StringUtils.isNotBlank(preTask.getOutput())) { + inputs.add(preTask.getOutput()); + } + }); + return inputs; + } + + @Override + public SchedulerEnum.TaskStatus getStatus(TaskExecuteContext context, String resource) { + SchedulerTask task = memoryTaskServer.getTask(resource); + SchedulerTask schedulerTask = context.getTask(); + if (task == null) { + context.addTraceLog("Vectorizer task(%s) not found, resubmit", resource); + submit(context); + context.addTraceLog("Async task resubmit successful!"); + return SchedulerEnum.TaskStatus.RUNNING; + } + context.addTraceLog("Vectorizer task status is %s", task.getStatus()); + if 
(StringUtils.isNotBlank(task.getTraceLog())) { + context.addTraceLog( + "Vectorizer task traceLog:%s%s", + System.getProperty("line.separator"), task.getTraceLog()); + task.setTraceLog(""); + } + switch (task.getStatus()) { + case RUNNING: + break; + case ERROR: + int retryNum = 3; + if (schedulerTask.getExecuteNum() % retryNum == 0) { + context.addTraceLog("Vectorizer task(%s) status is ERROR, resubmit", resource); + memoryTaskServer.stopTask(resource); + submit(context); + context.addTraceLog("Async task resubmit successful!"); + return SchedulerEnum.TaskStatus.RUNNING; + } + break; + case FINISH: + memoryTaskServer.stopTask(resource); + schedulerTask.setOutput(resource); + break; + default: + context.addTraceLog("Vectorizer Task Status is %s. Do nothing", task.getStatus()); + break; + } + return task.getStatus(); + } + + @Override + public Boolean stop(TaskExecuteContext context, String resource) { + return memoryTaskServer.stopTask(resource); + } + + private static class VectorizerTaskCallable extends MemoryTaskServer.MemoryTaskCallable { + + private DefaultValue value; + + private ObjectStorageClient objectStorageClient; + + private ProjectService projectService; + + private TaskExecuteContext context; + + private List inputs; + + public VectorizerTaskCallable( + DefaultValue value, + ProjectService projectService, + TaskExecuteContext context, + List inputs) { + this.value = value; + this.projectService = projectService; + this.objectStorageClient = + ObjectStorageClientDriverManager.getClient(value.getObjectStorageUrl()); + this.context = context; + this.inputs = inputs; + } + + @Override + public String call() throws Exception { + List subGraphList = Lists.newArrayList(); + addTraceLog("Start vectorizer task..."); + for (String input : inputs) { + String data = objectStorageClient.getString(value.getBuilderBucketName(), input); + List subGraphs = + JSON.parseObject(data, new TypeReference>() {}); + subGraphList.addAll(vectorizer(context, subGraphs)); + } + 
addTraceLog("vectorizer task complete..."); + SchedulerTask task = context.getTask(); + String fileKey = + CommonUtils.getTaskStorageFileKey( + task.getProjectId(), task.getInstanceId(), task.getId(), task.getType()); + objectStorageClient.saveString( + value.getBuilderBucketName(), JSON.toJSONString(subGraphList), fileKey); + addTraceLog( + "vectorizer result is stored bucket:%s file:%s", value.getBuilderBucketName(), fileKey); + return fileKey; + } + + public List vectorizer( + TaskExecuteContext context, List subGraphs) { + List subGraphList = Lists.newArrayList(); + Long projectId = context.getInstance().getProjectId(); + String projectConfig = projectService.queryById(projectId).getConfig(); + JSONObject vec = + JSONObject.parseObject(projectConfig).getJSONObject(CommonConstants.VECTORIZER); + PythonInvokeMethod vectorizer = PythonInvokeMethod.BRIDGE_COMPONENT; + JSONObject pyConfig = new JSONObject(); + pyConfig.put(BuilderConstant.TYPE, BuilderConstant.BATCH); + pyConfig.put(BuilderConstant.VECTORIZE_MODEL, vec); + + PemjaConfig pemjaConfig = + new PemjaConfig( + value.getPythonExec(), + value.getPythonPaths(), + value.getSchemaUrlHost(), + projectId, + vectorizer, + Maps.newHashMap()); + for (SubGraphRecord subGraph : subGraphs) { + addTraceLog("invoke vectorizer processor operator:%s", pemjaConfig.getClassName()); + Map map = new ObjectMapper().convertValue(subGraph, Map.class); + List result = + (List) + PemjaUtils.invoke( + pemjaConfig, BuilderConstant.VECTORIZER_ABC, pyConfig.toJSONString(), map); + List records = + JSON.parseObject( + JSON.toJSONString(result), new TypeReference>() {}); + subGraphList.addAll(records); + for (SubGraphRecord subGraphRecord : records) { + addTraceLog( + "vectorizer processor succeed node:%s edge%s", + subGraphRecord.getResultNodes().size(), subGraphRecord.getResultEdges().size()); + } + } + return subGraphList; + } + } +} diff --git 
a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagWriterAsyncTask.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagWriterAsyncTask.java new file mode 100644 index 000000000..2d36b5a5a --- /dev/null +++ b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/async/builder/KagWriterAsyncTask.java @@ -0,0 +1,246 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.core.scheduler.service.task.async.builder; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.TypeReference; +import com.antgroup.openspg.builder.core.runtime.BuilderContext; +import com.antgroup.openspg.builder.model.pipeline.config.Neo4jSinkNodeConfig; +import com.antgroup.openspg.builder.model.record.RecordAlterOperationEnum; +import com.antgroup.openspg.builder.model.record.SubGraphRecord; +import com.antgroup.openspg.builder.runner.local.physical.sink.impl.Neo4jSinkWriter; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient; +import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriverManager; +import com.antgroup.openspg.common.util.CommonUtils; +import com.antgroup.openspg.common.util.StringUtils; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum; +import com.antgroup.openspg.server.common.service.config.DefaultValue; +import 
com.antgroup.openspg.server.common.service.project.ProjectService; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; +import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteContext; +import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteDag; +import com.antgroup.openspg.server.core.scheduler.service.common.MemoryTaskServer; +import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerTaskService; +import com.antgroup.openspg.server.core.scheduler.service.task.async.AsyncTaskExecuteTemplate; +import com.google.common.collect.Lists; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component("kagWriterAsyncTask") +public class KagWriterAsyncTask extends AsyncTaskExecuteTemplate { + + @Autowired private DefaultValue value; + + @Autowired private ProjectService projectManager; + + @Autowired private MemoryTaskServer memoryTaskServer; + + @Autowired private SchedulerTaskService taskService; + + @Override + public String submit(TaskExecuteContext context) { + SchedulerInstance instance = context.getInstance(); + SchedulerTask task = context.getTask(); + String key = + CommonUtils.getTaskStorageFileKey( + task.getProjectId(), task.getInstanceId(), task.getId(), task.getType()); + SchedulerTask memoryTask = memoryTaskServer.getTask(key); + if (memoryTask != null) { + context.addTraceLog("Writer task already exists; reuse it"); + return memoryTask.getNodeId(); + } + + List inputs = getInputs(instance, task); + String taskId = + memoryTaskServer.submit( + new WriterTaskCallable(value, projectManager, context, inputs), key); + return taskId; + } + + private List getInputs(SchedulerInstance instance, SchedulerTask task) { + List nodes = + 
instance.getTaskDag().getRelatedNodes(task.getNodeId(), false); + List inputs = Lists.newArrayList(); + nodes.forEach( + node -> { + SchedulerTask preTask = + taskService.queryByInstanceIdAndNodeId(task.getInstanceId(), node.getId()); + if (preTask != null && StringUtils.isNotBlank(preTask.getOutput())) { + inputs.add(preTask.getOutput()); + } + }); + return inputs; + } + + @Override + public SchedulerEnum.TaskStatus getStatus(TaskExecuteContext context, String resource) { + SchedulerTask task = memoryTaskServer.getTask(resource); + SchedulerTask schedulerTask = context.getTask(); + if (task == null) { + context.addTraceLog("Writer task(%s) not found, resubmit", resource); + submit(context); + context.addTraceLog("Async task resubmit successful!"); + return SchedulerEnum.TaskStatus.RUNNING; + } + context.addTraceLog("Writer task status is %s", task.getStatus()); + if (StringUtils.isNotBlank(task.getTraceLog())) { + context.addTraceLog( + "Writer task traceLog:%s%s", System.getProperty("line.separator"), task.getTraceLog()); + task.setTraceLog(""); + } + switch (task.getStatus()) { + case RUNNING: + break; + case ERROR: + int retryNum = 3; + if (schedulerTask.getExecuteNum() % retryNum == 0) { + context.addTraceLog("Writer task(%s) status is ERROR, resubmit", resource); + memoryTaskServer.stopTask(resource); + submit(context); + context.addTraceLog("Async task resubmit successful!"); + return SchedulerEnum.TaskStatus.RUNNING; + } + break; + case FINISH: + memoryTaskServer.stopTask(resource); + schedulerTask.setOutput(resource); + removeInputs(context); + break; + default: + context.addTraceLog("Writer Task Status is %s. 
Do nothing", task.getStatus()); + break; + } + return task.getStatus(); + } + + public void removeInputs(TaskExecuteContext context) { + SchedulerInstance instance = context.getInstance(); + SchedulerTask task = context.getTask(); + List inputs = getInputs(instance, task); + ObjectStorageClient objectStorageClient = + ObjectStorageClientDriverManager.getClient(value.getObjectStorageUrl()); + for (String input : inputs) { + objectStorageClient.removeObject(value.getBuilderBucketName(), input); + } + } + + @Override + public Boolean stop(TaskExecuteContext context, String resource) { + return memoryTaskServer.stopTask(resource); + } + + private static class WriterTaskCallable extends MemoryTaskServer.MemoryTaskCallable { + + private DefaultValue value; + + private ProjectService projectManager; + + private ObjectStorageClient objectStorageClient; + + private TaskExecuteContext context; + + private List inputs; + + private static final String VECTOR = "_vector"; + + public WriterTaskCallable( + DefaultValue value, + ProjectService projectManager, + TaskExecuteContext context, + List inputs) { + this.value = value; + this.projectManager = projectManager; + this.objectStorageClient = + ObjectStorageClientDriverManager.getClient(value.getObjectStorageUrl()); + this.context = context; + this.inputs = inputs; + } + + @Override + public String call() throws Exception { + List subGraphList = Lists.newArrayList(); + addTraceLog("Start write task..."); + for (String input : inputs) { + String data = objectStorageClient.getString(value.getBuilderBucketName(), input); + List subGraphs = + JSON.parseObject(data, new TypeReference>() {}); + writer(value, projectManager, context, subGraphs); + subGraphList.addAll(simpleSubGraph(subGraphs)); + } + addTraceLog("Writer task complete..."); + SchedulerTask task = context.getTask(); + String fileKey = + CommonUtils.getTaskStorageFileKey( + task.getProjectId(), task.getInstanceId(), task.getId(), task.getType()); + 
objectStorageClient.saveString( + value.getBuilderBucketName(), JSON.toJSONString(subGraphList), fileKey); + addTraceLog( + "Writer result is stored bucket:%s file:%s", value.getBuilderBucketName(), fileKey); + return fileKey; + } + + public List simpleSubGraph(List subGraphs) { + for (SubGraphRecord subGraph : subGraphs) { + subGraph + .getResultNodes() + .forEach( + node -> { + Iterator> iterator = + node.getProperties().entrySet().iterator(); + while (iterator.hasNext()) { + Map.Entry entry = iterator.next(); + if (entry.getKey().endsWith(VECTOR)) { + iterator.remove(); + } + } + }); + } + return subGraphs; + } + + public void writer( + DefaultValue value, + ProjectService projectManager, + TaskExecuteContext context, + List subGraphs) { + Neo4jSinkWriter writer = + new Neo4jSinkWriter( + UUID.randomUUID().toString(), "writer", new Neo4jSinkNodeConfig(true)); + BuilderContext builderContext = + new BuilderContext() + .setProjectId(context.getInstance().getProjectId()) + .setJobName("writer") + .setPythonExec(value.getPythonExec()) + .setPythonPaths(value.getPythonPaths()) + .setOperation(RecordAlterOperationEnum.UPSERT) + .setEnableLeadTo(false) + .setProject( + JSON.toJSONString(projectManager.queryById(context.getInstance().getProjectId()))) + .setGraphStoreUrl( + projectManager.getGraphStoreUrl(context.getInstance().getProjectId())); + writer.init(builderContext); + for (SubGraphRecord subGraph : subGraphs) { + addTraceLog("Start Writer processor..."); + writer.writeToNeo4j(subGraph); + addTraceLog( + "Writer processor succeed node:%s edge%s", + subGraph.getResultNodes().size(), subGraph.getResultEdges().size()); + } + } + } +} diff --git a/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/sync/builder/KagReaderSyncTask.java b/server/core/scheduler/service/src/main/java/com/antgroup/openspg/server/core/scheduler/service/task/sync/builder/KagReaderSyncTask.java new file mode 100644 index 000000000..23fec477f 
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */
package com.antgroup.openspg.server.core.scheduler.service.task.sync.builder;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.antgroup.openspg.builder.model.record.ChunkRecord;
import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient;
import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriverManager;
import com.antgroup.openspg.common.constants.BuilderConstant;
import com.antgroup.openspg.common.util.CommonUtils;
import com.antgroup.openspg.common.util.pemja.PythonInvokeMethod;
import com.antgroup.openspg.server.common.model.bulider.BuilderJob;
import com.antgroup.openspg.server.common.model.project.Project;
import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum;
import com.antgroup.openspg.server.common.service.builder.BuilderJobService;
import com.antgroup.openspg.server.common.service.config.DefaultValue;
import com.antgroup.openspg.server.common.service.project.ProjectService;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask;
import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteContext;
import com.antgroup.openspg.server.core.scheduler.service.task.sync.SyncTaskExecuteTemplate;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Synchronous reader step of the KAG builder pipeline: reads source chunks via the Python
 * bridge reader and stores them as JSON in object storage, publishing the file key as the
 * task output for downstream tasks.
 */
@Component("kagReaderSyncTask")
public class KagReaderSyncTask extends SyncTaskExecuteTemplate {

  @Autowired private DefaultValue value;

  @Autowired private BuilderJobService builderJobService;

  @Autowired private ProjectService projectService;

  /**
   * Reads the builder job's source document and stores the resulting chunks.
   *
   * @param context scheduler execution context of the current task
   * @return always FINISH; failures propagate as exceptions to the scheduler
   */
  @Override
  public SchedulerEnum.TaskStatus submit(TaskExecuteContext context) {
    SchedulerJob job = context.getJob();
    BuilderJob builderJob = builderJobService.getById(Long.valueOf(job.getInvokerId()));
    String url = builderJob.getFileUrl();
    JSONObject config =
        JSON.parseObject(builderJob.getExtension()).getJSONObject(BuilderConstant.YU_QUE_CONFIG);
    String token = config == null ? null : config.getString(BuilderConstant.TOKEN);
    List<ChunkRecord.Chunk> chunks = readSource(context, url, token);
    SchedulerTask task = context.getTask();
    String fileKey =
        CommonUtils.getTaskStorageFileKey(
            task.getProjectId(), task.getInstanceId(), task.getId(), task.getType());
    // Fix: the client was previously stored in an instance field assigned per call — a data
    // race in this singleton @Component. A local variable is race-free and equivalent.
    ObjectStorageClient objectStorageClient =
        ObjectStorageClientDriverManager.getClient(value.getObjectStorageUrl());
    objectStorageClient.saveString(
        value.getBuilderBucketName(), JSON.toJSONString(chunks), fileKey);
    context.addTraceLog(
        "The read Chunks is stored bucket:%s file:%s", value.getBuilderBucketName(), fileKey);
    task.setOutput(fileKey);
    return SchedulerEnum.TaskStatus.FINISH;
  }

  /**
   * Invokes the Python bridge reader for the given source URL.
   *
   * <p>NOTE(review): the element type {@code ChunkRecord.Chunk} is reconstructed from the
   * {@code ChunkRecord} import; confirm against {@code CommonUtils.readSource}'s declaration.
   *
   * @param context execution context, used for trace logging
   * @param url source document URL
   * @param token optional access token (e.g. for YuQue), may be null
   * @return the chunks produced by the reader
   */
  public List<ChunkRecord.Chunk> readSource(TaskExecuteContext context, String url, String token) {
    Long projectId = context.getInstance().getProjectId();
    Project project = projectService.queryById(projectId);
    context.addTraceLog("invoke read operator:%s", PythonInvokeMethod.BRIDGE_READER.getMethod());
    List<ChunkRecord.Chunk> chunkList =
        com.antgroup.openspg.builder.core.physical.utils.CommonUtils.readSource(
            value.getPythonExec(),
            value.getPythonPaths(),
            value.getSchemaUrlHost(),
            project,
            url,
            token);
    context.addTraceLog(
        "invoke read operator:%s chunks:%s succeed",
        PythonInvokeMethod.BRIDGE_READER.getMethod(), chunkList.size());

    return chunkList;
  }
}
/*
 * Copyright 2023 OpenSPG Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */
package com.antgroup.openspg.server.core.scheduler.service.translate.builder;

import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClient;
import com.antgroup.openspg.cloudext.interfaces.objectstorage.ObjectStorageClientDriverManager;
import com.antgroup.openspg.common.constants.BuilderConstant;
import com.antgroup.openspg.server.common.model.bulider.BuilderJob;
import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum;
import com.antgroup.openspg.server.common.service.builder.BuilderJobService;
import com.antgroup.openspg.server.common.service.config.DefaultValue;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob;
import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask;
import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteDag;
import com.antgroup.openspg.server.core.scheduler.service.metadata.SchedulerTaskService;
import com.antgroup.openspg.server.core.scheduler.service.translate.Translate;
import com.google.common.collect.Lists;
import java.util.List;
import java.util.UUID;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Translates a KAG builder job into its fixed task DAG
 * (reader -> splitter -> extractor -> vectorizer -> alignment -> writer)
 * and mirrors instance status back onto the builder job record.
 */
@Component("kagBuilderTranslate")
public class KagBuilderTranslate implements Translate {

  @Autowired private BuilderJobService builderJobService;

  @Autowired private SchedulerTaskService taskService;

  @Autowired private DefaultValue value;

  @Override
  public TaskExecuteDag translate(SchedulerJob schedulerJob) {
    return getTaskDag();
  }

  /**
   * Propagates the instance status to the builder job and, once the instance has finished,
   * deletes the intermediate task outputs from object storage (the writer task's output is
   * the final result and is kept).
   */
  @Override
  public void statusCallback(
      SchedulerJob job, SchedulerInstance instance, SchedulerEnum.InstanceStatus instanceStatus) {
    BuilderJob builderJob = new BuilderJob();
    builderJob.setId(Long.valueOf(job.getInvokerId()));
    builderJob.setStatus(instanceStatus.name());
    builderJobService.update(builderJob);
    if (SchedulerEnum.InstanceStatus.isFinished(instanceStatus)) {
      ObjectStorageClient objectStorageClient =
          ObjectStorageClientDriverManager.getClient(value.getObjectStorageUrl());
      List<SchedulerTask> tasks = taskService.queryByInstanceId(instance.getId());
      for (SchedulerTask task : tasks) {
        if (BuilderConstant.KAG_WRITER_ASYNC_TASK.equalsIgnoreCase(task.getType())) {
          continue;
        }
        String output = task.getOutput();
        // Guard added: tasks that never produced an output (failed/skipped) have a null or
        // empty output; removing a null object key is at best a no-op and at worst an error.
        if (output == null || output.isEmpty()) {
          continue;
        }
        objectStorageClient.removeObject(value.getBuilderBucketName(), output);
      }
    }
  }

  /** Builds the linear KAG builder task DAG. */
  public TaskExecuteDag getTaskDag() {
    TaskExecuteDag.Node reader = newNode("Reader", "kagReaderSyncTask");
    TaskExecuteDag.Node splitter = newNode("Splitter", "kagSplitterAsyncTask");
    TaskExecuteDag.Node extractor = newNode("Extractor", "kagExtractorAsyncTask");
    TaskExecuteDag.Node vectorizer = newNode("Vectorizer", "kagVectorizerAsyncTask");
    TaskExecuteDag.Node alignment = newNode("Alignment", "kagAlignmentAsyncTask");
    TaskExecuteDag.Node writer = newNode("Writer", "kagWriterAsyncTask");

    List<TaskExecuteDag.Node> nodes =
        Lists.newArrayList(reader, splitter, extractor, vectorizer, alignment, writer);

    // The pipeline is strictly linear: chain each node to its successor.
    List<TaskExecuteDag.Edge> edges = Lists.newArrayList();
    for (int i = 0; i + 1 < nodes.size(); i++) {
      edges.add(newEdge(nodes.get(i).getId(), nodes.get(i + 1).getId()));
    }

    TaskExecuteDag taskDag = new TaskExecuteDag();
    taskDag.setNodes(nodes);
    taskDag.setEdges(edges);
    return taskDag;
  }

  /** Creates a DAG node with a random id, the given display name and task component bean. */
  private static TaskExecuteDag.Node newNode(String name, String taskComponent) {
    TaskExecuteDag.Node node = new TaskExecuteDag.Node();
    node.setId(UUID.randomUUID().toString());
    node.setName(name);
    node.setTaskComponent(taskComponent);
    return node;
  }

  /** Creates a directed DAG edge between the two node ids. */
  private static TaskExecuteDag.Edge newEdge(String from, String to) {
    TaskExecuteDag.Edge edge = new TaskExecuteDag.Edge();
    edge.setFrom(from);
    edge.setTo(to);
    return edge;
  }
}
a/server/core/schema/model/src/main/java/com/antgroup/openspg/core/schema/model/predicate/IndexTypeEnum.java +++ b/server/core/schema/model/src/main/java/com/antgroup/openspg/core/schema/model/predicate/IndexTypeEnum.java @@ -17,19 +17,23 @@ public enum IndexTypeEnum { /** vector index. */ - VECTOR("VECTOR"), + VECTOR("VECTOR", "Vector"), /** text index. */ - TEXT("TEXT"), + TEXT("TEXT", "Text"), /** text and vector index. */ - TEXT_AND_VECTOR("TEXT_AND_VECTOR"); + TEXT_AND_VECTOR("TEXT_AND_VECTOR", "TextAndVector"); /** Name of index. */ private final String nameEn; - IndexTypeEnum(String nameEn) { + /** Name of index. */ + private final String scriptName; + + IndexTypeEnum(String nameEn, String scriptName) { this.nameEn = nameEn; + this.scriptName = scriptName; } public static IndexTypeEnum toEnum(String name) { @@ -41,7 +45,20 @@ public static IndexTypeEnum toEnum(String name) { return null; } + public static IndexTypeEnum getByScriptName(String scriptName) { + for (IndexTypeEnum indexTypeEnum : IndexTypeEnum.values()) { + if (indexTypeEnum.getScriptName().equalsIgnoreCase(scriptName)) { + return indexTypeEnum; + } + } + throw new IllegalArgumentException("unknown type: " + scriptName); + } + public String getNameEn() { return nameEn; } + + public String getScriptName() { + return scriptName; + } } diff --git a/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/predicate/RelationService.java b/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/predicate/RelationService.java index 037f5ab60..d4355f2b4 100644 --- a/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/predicate/RelationService.java +++ b/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/predicate/RelationService.java @@ -53,4 +53,12 @@ public interface RelationService { * @return list of relation type */ List queryBySubjectId(List subjectIds); + + 
/** + * Query relation type by spg unique id. + * + * @param uniqueIds list of spg unique id + * @return list of relation type + */ + List queryByUniqueId(List uniqueIds); } diff --git a/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/predicate/impl/RelationServiceImpl.java b/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/predicate/impl/RelationServiceImpl.java index 9b9373b1d..23484e81b 100644 --- a/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/predicate/impl/RelationServiceImpl.java +++ b/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/predicate/impl/RelationServiceImpl.java @@ -149,4 +149,46 @@ public List queryBySubjectId(List subjectIds) { return PredicateAssemble.toRelation( simplePredicates, spgTypes, subProperties, semantics, logicalRules); } + + @Override + public List queryByUniqueId(List uniqueIds) { + if (CollectionUtils.isEmpty(uniqueIds)) { + return Collections.emptyList(); + } + List simplePredicates = + propertyRepository.queryByUniqueId(uniqueIds, SPGOntologyEnum.RELATION); + if (CollectionUtils.isEmpty(simplePredicates)) { + return Collections.emptyList(); + } + + Set spgTypeIds = new HashSet<>(); + spgTypeIds.addAll( + simplePredicates.stream() + .map(e -> e.getSubjectTypeId().getUniqueId()) + .collect(Collectors.toList())); + spgTypeIds.addAll( + simplePredicates.stream() + .map(e -> e.getObjectTypeId().getUniqueId()) + .collect(Collectors.toList())); + List spgTypes = + simpleSpgTypeRepository.queryByUniqueId(Lists.newArrayList(spgTypeIds)); + + List relationIds = + simplePredicates.stream().map(SimpleProperty::getUniqueId).collect(Collectors.toList()); + List subProperties = + subPropertyService.queryBySubjectId(relationIds, SPGOntologyEnum.RELATION); + + List semantics = + semanticService.queryBySubjectIds(relationIds, SPGOntologyEnum.RELATION); + + List ruleCodes = + 
simplePredicates.stream() + .map(SimpleProperty::getRuleCode) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + List logicalRules = logicalRuleService.queryByRuleCode(ruleCodes); + + return PredicateAssemble.toRelation( + simplePredicates, spgTypes, subProperties, semantics, logicalRules); + } } diff --git a/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/type/SPGTypeService.java b/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/type/SPGTypeService.java index 449ab04be..e699c80f8 100644 --- a/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/type/SPGTypeService.java +++ b/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/type/SPGTypeService.java @@ -14,9 +14,12 @@ package com.antgroup.openspg.server.core.schema.service.type; import com.antgroup.openspg.core.schema.model.identifier.SPGTypeIdentifier; +import com.antgroup.openspg.core.schema.model.predicate.Relation; +import com.antgroup.openspg.core.schema.model.semantic.SPGOntologyEnum; import com.antgroup.openspg.core.schema.model.type.BaseAdvancedType; import com.antgroup.openspg.core.schema.model.type.BaseSPGType; import com.antgroup.openspg.core.schema.model.type.ProjectSchema; +import com.antgroup.openspg.server.core.schema.service.predicate.model.SimpleProperty; import java.util.List; import java.util.Set; @@ -77,6 +80,23 @@ public interface SPGTypeService { */ List querySPGTypeById(List uniqueIds); + /** + * Query relation type by spg unique id. + * + * @param uniqueIds list of spg unique id + * @return list of relation type + */ + List queryRelationByUniqueId(List uniqueIds); + + /** + * Query base type by spg unique id. 
+ * + * @param uniqueIds list of spg unique id + * @param ontologyEnum + * @return list of relation type + */ + List queryPropertyByUniqueId(List uniqueIds, SPGOntologyEnum ontologyEnum); + /** * Query name of standard type that is spreadable, such as STD.ChinaMobile, STD.IdentifyCard * diff --git a/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/type/impl/SPGTypeServiceImpl.java b/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/type/impl/SPGTypeServiceImpl.java index 32dcedce2..b8c41a4f0 100644 --- a/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/type/impl/SPGTypeServiceImpl.java +++ b/server/core/schema/service/src/main/java/com/antgroup/openspg/server/core/schema/service/type/impl/SPGTypeServiceImpl.java @@ -25,6 +25,8 @@ import com.antgroup.openspg.core.schema.model.type.WithAlterOperation; import com.antgroup.openspg.server.core.schema.service.predicate.PropertyService; import com.antgroup.openspg.server.core.schema.service.predicate.RelationService; +import com.antgroup.openspg.server.core.schema.service.predicate.model.SimpleProperty; +import com.antgroup.openspg.server.core.schema.service.predicate.repository.PropertyRepository; import com.antgroup.openspg.server.core.schema.service.type.SPGTypeService; import com.antgroup.openspg.server.core.schema.service.type.convertor.SPGTypeAssemble; import com.antgroup.openspg.server.core.schema.service.type.convertor.SPGTypeConvertor; @@ -53,6 +55,7 @@ public class SPGTypeServiceImpl implements SPGTypeService { @Autowired private SPGTypeRepository spgTypeRepository; @Autowired private PropertyService propertyService; @Autowired private RelationService relationService; + @Autowired private PropertyRepository propertyRepository; @Autowired private ProjectOntologyRelRepository projectOntologyRelRepository; @Override @@ -161,6 +164,20 @@ public List querySPGTypeById(List uniqueIds) { return 
spgTypes; } + @Override + public List queryRelationByUniqueId(List uniqueIds) { + if (CollectionUtils.isEmpty(uniqueIds)) { + return Collections.emptyList(); + } + return relationService.queryByUniqueId(uniqueIds); + } + + @Override + public List queryPropertyByUniqueId( + List uniqueIds, SPGOntologyEnum ontologyEnum) { + return propertyRepository.queryByUniqueId(uniqueIds, ontologyEnum); + } + private List queryCustomizedType(Long projectId) { List standardTypes = spgTypeRepository.queryAllStandardType(); List simpleSpgTypes = spgTypeRepository.queryByProject(projectId); diff --git a/server/infra/dao/pom.xml b/server/infra/dao/pom.xml index eaa0ec825..7c89e3e46 100644 --- a/server/infra/dao/pom.xml +++ b/server/infra/dao/pom.xml @@ -24,6 +24,10 @@ infra-dao + + com.antgroup.openspgapp + common-util + com.antgroup.openspg common-util @@ -32,6 +36,10 @@ com.antgroup.openspg.server core-schema-service + + com.antgroup.openspg.server + core-scheduler-service + com.antgroup.openspg.server common-service diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/AccountDO.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/AccountDO.java new file mode 100644 index 000000000..205f6b71e --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/AccountDO.java @@ -0,0 +1,72 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
 */

package com.antgroup.openspg.server.infra.dao.dataobject;

import java.util.Date;
import lombok.Data;

/** Data object mapped to database table {@code kg_user}: a platform user account. */
@Data
public class AccountDO {
  /** primary key */
  private Long id;

  /** create time */
  private Date gmtCreate;

  /** last update time */
  private Date gmtModified;

  /** user number (unique account identifier) */
  private String userNo;

  /** current API token */
  private String token;

  /** previous token value, kept after a token rotation */
  private String lastToken;

  /** random salt string used for token generation */
  private String salt;

  /** time at which the last token was disabled (rotation timestamp) */
  private Date gmtLastTokenDisable;

  /** data-warehouse access id — presumably a DW credential pair with dwAccessKey; confirm */
  private String dwAccessId;

  /** data-warehouse access key */
  private String dwAccessKey;

  /** account real name */
  private String realName;

  /** account nick name */
  private String nickName;

  /** account email */
  private String email;

  /** login (domain) account name */
  private String domainAccount;

  /** account mobile number */
  private String mobile;

  /** account WeChat id */
  private String wxAccount;

  /** free-form account configuration — NOTE(review): format not shown here, likely JSON */
  private String config;
}
You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied.
 */

package com.antgroup.openspg.server.infra.dao.dataobject;

import java.util.Date;
import lombok.Getter;
import lombok.Setter;

/** Data object for a knowledge-builder job record. */
@Getter
@Setter
public class BuilderJobDO {

  /** primary key */
  private Long id;
  /** owning project id */
  private Long projectId;
  /** create time */
  private Date gmtCreate;
  /** last update time */
  private Date gmtModified;
  /** last modifier */
  private String modifyUser;
  /** creator */
  private String createUser;
  /** associated scheduler task id — presumably the scheduler job/task link; confirm */
  private Long taskId;
  /** job display name */
  private String jobName;
  /** number of chunks produced by the job */
  private Long chunkNum;
  /** URL of the source file to build from */
  private String fileUrl;
  /** job status */
  private String status;
  /** job type */
  private String type;
  /** extension payload — NOTE(review): parsed as JSON elsewhere (see YU_QUE_CONFIG usage) */
  private String extension;
  /** version */
  private String version;
  /** computing configuration */
  private String computingConf;
  /** life cycle — scheduler life cycle value (PERIOD/ONCE/REAL_TIME); confirm */
  private String lifeCycle;
  /** action */
  private String action;
}
 */

package com.antgroup.openspg.server.infra.dao.dataobject;

import java.util.Date;
import lombok.Data;

/** Data object mapped to database table {@code kg_config}: a versioned configuration entry. */
@Data
public class ConfigDO {
  /** primary key */
  private Long id;

  /** create time */
  private Date gmtCreate;

  /** last update time */
  private Date gmtModified;

  /** creator userNo */
  private String userNo;

  /** project id; may also be a unique value scoped to a domain */
  private String projectId;

  /** config name */
  private String configName;

  /** config id */
  private String configId;

  /** config version */
  private String version;

  /** status: 1 = offline (default), 2 = online */
  private Integer status;

  /** configuration content, JSON */
  private String config;

  /** version description */
  private String description;

  /** resource id, foreign key association with the schema view */
  private String resourceId;

  /** resource type */
  private String resourceType;
}
 */

package com.antgroup.openspg.server.infra.dao.dataobject;

import java.util.Date;
import lombok.Getter;
import lombok.Setter;

/** Data object describing an external data-source connection. */
@Getter
@Setter
public class DataSourceDO {

  /** primary key */
  private Long id;
  /** create time */
  private Date gmtCreate;
  /** last update time */
  private Date gmtModified;
  /** creator */
  private String createUser;
  /** last modifier */
  private String updateUser;
  /** status */
  private String status;
  /** remark */
  private String remark;
  /** data-source type */
  private String type;
  /** database name */
  private String dbName;
  /** JDBC/connection URL */
  private String dbUrl;
  /** database user */
  private String dbUser;
  /** database password — NOTE(review): presumably stored encrypted per 'encrypt'; confirm */
  private String dbPassword;
  /** encryption flag/scheme for the credentials — confirm semantics against usage */
  private String encrypt;
  /** JDBC driver class name */
  private String dbDriverName;
  /** data-source category */
  private String category;
  /** extra connection info */
  private String connectionInfo;
}
 */

package com.antgroup.openspg.server.infra.dao.dataobject;

import java.util.Date;
import lombok.Getter;
import lombok.Setter;

/**
 * Data object mapped to database table {@code kg_user_resource_role}: grants a user a role on
 * a resource.
 */
@Getter
@Setter
public class PermissionDO {
  /** primary key */
  private Long id;

  /** create time */
  private Date gmtCreate;

  /** last update time */
  private Date gmtModified;

  /** userNo of the grantee */
  private String userNo;

  /** id of the resource the role applies to */
  private Long resourceId;

  /** granted role id */
  private Long roleId;

  /** resource tag (resource category) */
  private String resourceTag;

  /** status: -1 = rejected; 99 = under approval; 1 = effective; 9 = deleted */
  private String status;

  /** expiry date of the grant */
  private Date expireDate;
}
 */
package com.antgroup.openspg.server.infra.dao.dataobject;

import java.util.Date;
import lombok.Getter;
import lombok.Setter;

/** Data object describing a scheduler's runtime bookkeeping record. */
@Getter
@Setter
public class SchedulerInfoDO {

  /** primary key */
  private Long id;

  /** create time */
  private Date gmtCreate;

  /** last modified time */
  private Date gmtModified;

  /** scheduler name */
  private String name;

  /** scheduler status */
  private String status;

  /** scheduling period, in seconds */
  private Long period;

  /** execution count */
  private Integer count;

  /** execution log */
  private String log;

  /** scheduler configuration */
  private String config;

  /** lock acquisition time — presumably used for distributed-lock lease tracking; confirm */
  private Date lockTime;
}
+ */ +package com.antgroup.openspg.server.infra.dao.dataobject; + +import java.util.Date; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class SchedulerInstanceDO { + + /** primary key */ + private Long id; + + /** unique id = jobId+yyyyMMddHHmmss */ + private String uniqueId; + + /** project id */ + private Long projectId; + + /** SchedulerJobDO Id */ + private Long jobId; + + /** instance type */ + private String type; + + /** status */ + private String status; + + /** progress [0-100] */ + private Long progress; + + /** create User */ + private String createUser; + + /** create time */ + private Date gmtCreate; + + /** modify time */ + private Date gmtModified; + + /** instance begin Running Time */ + private Date beginRunningTime; + + /** instance finish Time */ + private Date finishTime; + + /** job Life Cycle:PERIOD,ONCE,REAL_TIME Enum:LifeCycle */ + private String lifeCycle; + + /** Dependent pre task completion */ + private String dependence; + + /** scheduler Date */ + private Date schedulerDate; + + /** version */ + private String version; + + /** extension */ + private String extension; + + /** task dag Config */ + private String taskDag; +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/SchedulerJobDO.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/SchedulerJobDO.java new file mode 100644 index 000000000..2a5f9847a --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/SchedulerJobDO.java @@ -0,0 +1,71 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ +package com.antgroup.openspg.server.infra.dao.dataobject; + +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.Status; +import java.util.Date; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class SchedulerJobDO { + + /** primary key */ + private Long id; + + /** createUser */ + private String createUser; + + /** modifyUser */ + private String modifyUser; + + /** Create time */ + private Date gmtCreate; + + /** Modified time */ + private Date gmtModified; + + /** project id */ + private Long projectId; + + /** job name */ + private String name; + + /** job Life Cycle:PERIOD,ONCE,REAL_TIME */ + private String lifeCycle; + + /** translate type */ + private String translateType; + + /** job Status:ENABLE,DISABLE */ + private Status status; + + /** Dependent pre task completion */ + private String dependence; + + /** Scheduler Cron expression default:0 0 0 * * ? 
*/ + private String schedulerCron; + + /** last Execute Time */ + private Date lastExecuteTime; + + /** invoker id, Primary key of the service table that triggers scheduler */ + private String invokerId; + + /** extension */ + private String extension; + + /** version */ + private String version; +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/SchedulerTaskDO.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/SchedulerTaskDO.java new file mode 100644 index 000000000..26b8eb57a --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/dataobject/SchedulerTaskDO.java @@ -0,0 +1,82 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ +package com.antgroup.openspg.server.infra.dao.dataobject; + +import java.util.Date; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class SchedulerTaskDO { + + /** primary key */ + private Long id; + + /** Create time */ + private Date gmtCreate; + + /** Modified time */ + private Date gmtModified; + + /** type */ + private String type; + + /** title */ + private String title; + + /** status */ + private String status; + + /** project id */ + private Long projectId; + + /** SchedulerJobDO Id */ + private Long jobId; + + /** instance id */ + private Long instanceId; + + /** execute Num */ + private Integer executeNum; + + /** execute begin Time */ + private Date beginTime; + + /** execute finish Time */ + private Date finishTime; + + /** estimate Finish Time */ + private Date estimateFinishTime; + + /** traceLog */ + private String traceLog; + + /** lock Time */ + private Date lockTime; + + /** resource */ + private String resource; + + /** input */ + private String input; + + /** output */ + private String output; + + /** node id */ + private String nodeId; + + /** extension,JSON */ + private String extension; +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/AccountMapper.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/AccountMapper.java new file mode 100644 index 000000000..b331361c2 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/AccountMapper.java @@ -0,0 +1,112 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.mapper; + +import com.antgroup.openspg.server.infra.dao.dataobject.AccountDO; +import java.util.Collection; +import java.util.List; +import org.apache.ibatis.annotations.Param; + +public interface AccountMapper { + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_user + * + * @param record com.antgroup.openspg.server.infra.dao.dataobject.AccountDO + * @return int + */ + int insert(AccountDO record); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_user + * + * @param record com.antgroup.openspg.server.infra.dao.dataobject.AccountDO + * @return int + */ + int updateByPrimaryKeySelective(AccountDO record); + + /** + * update by userNo + * + * @param record com.antgroup.openspg.server.infra.dao.dataobject.AccountDO + * @return int + */ + int updateByUserNo(AccountDO record); + + /** + * delete by userNo + * + * @param userNo java.lang.String + * @return int + * @param userNo + * @return + */ + int deleteByUserNo(@Param("userNo") String userNo); + + /** + * This method was generated by MyBatis Generator. 
This method corresponds to the database table + * kg_user + * + * @param id java.lang.Long + * @return com.antgroup.openspg.server.infra.dao.dataobject.AccountDO + */ + AccountDO selectByPrimaryKey(Long id); + + AccountDO getByUserId(String userNo); + + /** + * batch get simple user by userNo list + * + * @param userNos + * @return + */ + List getSimpleAccountByUserNoList(@Param("userNos") Collection userNos); + + /** + * get user by userNo or domainNo or name + * + * @param keyWord + * @return + */ + List getUserLikeUserNoOrDomainNoOrName(@Param("keyWord") String keyWord); + + /** + * select count account by condition + * + * @param accountDO + * @return + */ + Integer selectCountByCondition(AccountDO accountDO); + + /** + * select account by condition + * + * @param accountDO + * @param start + * @param size + * @return + */ + List selectByCondition( + @Param("record") AccountDO accountDO, @Param("start") int start, @Param("size") int size); + + /** + * update user config + * + * @param userNo + * @param config + * @return + */ + int updateUserConfig(@Param("userNo") String userNo, @Param("config") String config); +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/BuilderJobDOMapper.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/BuilderJobDOMapper.java new file mode 100644 index 000000000..adcef32be --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/BuilderJobDOMapper.java @@ -0,0 +1,33 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.mapper; + +import com.antgroup.openspg.server.common.model.bulider.BuilderJobQuery; +import com.antgroup.openspg.server.infra.dao.dataobject.BuilderJobDO; +import java.util.List; + +public interface BuilderJobDOMapper { + + Long insert(BuilderJobDO record); + + int deleteById(Long id); + + Long update(BuilderJobDO record); + + BuilderJobDO getById(Long id); + + List query(BuilderJobQuery record); + + int selectCountByQuery(BuilderJobQuery record); +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/ConfigMapper.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/ConfigMapper.java new file mode 100644 index 000000000..61f57f537 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/ConfigMapper.java @@ -0,0 +1,113 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.mapper; + +import com.antgroup.openspg.server.infra.dao.dataobject.ConfigDO; +import java.util.List; +import org.apache.ibatis.annotations.Param; + +/** @version ConfigMapper.java, v 0.1 2023年07月10日 下午5:46 */ +public interface ConfigMapper { + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_config + * + * @param record com.alipay.kgmng.common.dal.mybatis.po.KgConfigPO + * @return int + */ + int insert(ConfigDO record); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_config + * + * @param id java.lang.Long + * @return int + */ + int deleteByPrimaryKey(Long id); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_config + * + * @param ids List + * @return int + */ + int deleteByIds(@Param("ids") List ids); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_config + * + * @param record com.alipay.kgmng.common.dal.mybatis.po.KgConfigPO + * @return int + */ + int updateByPrimaryKeySelective(ConfigDO record); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_config + * + * @param id java.lang.Long + * @return com.alipay.kgmng.common.dal.mybatis.po.KgConfigPO + */ + ConfigDO selectByPrimaryKey(Long id); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_config + * + * @param record com.alipay.kgmng.common.dal.mybatis.po.KgConfigPO + * @param start int + * @param size int + * @return List + */ + List selectByCondition( + @Param("record") ConfigDO record, + @Param("keyword") String keyword, + @Param("exclude") List excludeType, + @Param("start") int start, + @Param("size") int size); + + /** + * This method was generated by MyBatis Generator. 
This method corresponds to the database table + * kg_config + * + * @param record com.alipay.kgmng.common.dal.mybatis.po.KgConfigPO + * @return int + */ + int selectCountByCondition( + @Param("record") ConfigDO record, + @Param("keyword") String keyword, + @Param("exclude") List excludeType); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_config + * + * @param ids List + * @return List + */ + List selectByIds(@Param("ids") List ids); + + /** + * select by configId and version + * + * @param configId + * @param version + * @return + */ + ConfigDO selectByConfigIdAndVersion( + @Param("configId") String configId, @Param("version") String version); +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/DataSourceDOMapper.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/DataSourceDOMapper.java new file mode 100644 index 000000000..2ff5206a5 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/DataSourceDOMapper.java @@ -0,0 +1,35 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.mapper; + +import com.antgroup.openspg.server.common.model.datasource.DataSourceQuery; +import com.antgroup.openspg.server.infra.dao.dataobject.DataSourceDO; +import java.util.List; + +public interface DataSourceDOMapper { + + Long insert(DataSourceDO record); + + int deleteById(Long id); + + Long update(DataSourceDO record); + + DataSourceDO getById(Long id); + + List query(DataSourceQuery record); + + int selectCountByQuery(DataSourceQuery record); + + List getGroupByType(DataSourceQuery record); +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/PermissionMapper.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/PermissionMapper.java new file mode 100644 index 000000000..b719588ac --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/PermissionMapper.java @@ -0,0 +1,277 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.mapper; + +import com.antgroup.openspg.server.infra.dao.dataobject.PermissionDO; +import java.util.List; +import org.apache.ibatis.annotations.Mapper; +import org.apache.ibatis.annotations.Param; + +/** + * Permission mapper + * + * @author nanmu + */ +@Mapper +public interface PermissionMapper { + /** + * This method was generated by MyBatis Generator. 
This method corresponds to the database table + * kg_resource_permission + * + * @param record com.antgroup.openspg.server.infra.dao.dataobject.PermissionDO + * @return int + */ + int insert(PermissionDO record); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_resource_permission + * + * @param id java.lang.Long + * @return int + */ + int deleteByPrimaryKey(Long id); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_resource_permission + * + * @param ids List + * @return int + */ + int deleteByIds(@Param("ids") List ids); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_resource_permission + * + * @param record com.antgroup.openspg.server.infra.dao.dataobject.PermissionDO + * @return int + */ + int updateByPrimaryKeySelective(PermissionDO record); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_resource_permission + * + * @param id java.lang.Long + * @return com.antgroup.openspg.server.infra.dao.dataobject.PermissionDO + */ + PermissionDO selectByPrimaryKey(Long id); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_resource_permission + * + * @param record com.antgroup.openspg.server.infra.dao.dataobject.PermissionDO + * @param start int + * @param size int + * @return List + */ + List selectByCondition( + @Param("record") PermissionDO record, @Param("start") int start, @Param("size") int size); + + /** + * This method was generated by MyBatis Generator. This method corresponds to the database table + * kg_resource_permission + * + * @param record com.antgroup.openspg.server.infra.dao.dataobject.PermissionDO + * @return int + */ + int selectCountByCondition(PermissionDO record); + + /** + * This method was generated by MyBatis Generator. 
This method corresponds to the database table + * kg_resource_permission + * + * @param ids List + * @return List + */ + List selectByIds(@Param("ids") List ids); + + /** + * Batch Insert Data + * + * @param records + * @return + */ + int batchInsert(@Param("records") List records); + + /** + * update KgUserResourceRole + * + * @param record + * @return + */ + int updateResourceRole(@Param("record") PermissionDO record); + + /** + * delete by resourceId and userNo + * + * @param record + * @return + */ + int deleteByResourceIdAndUserNo(@Param("record") PermissionDO record); + + /** + * select all by condition + * + * @param record + * @return + */ + List selectAllByCondition(@Param("record") PermissionDO record); + + /** + * select by userNo and resourceId and resourceTag for update + * + * @param userNo + * @param resourceId + * @param resourceTag + * @return + */ + PermissionDO selectUserTypeRoleForUpdate( + @Param("userNo") String userNo, + @Param("resourceId") Long resourceId, + @Param("resourceTag") String resourceTag); + + /** + * Query Records with Specified Permissions + * + * @param userId + * @param roleIds + * @param resourceTag + * @return + */ + List selectByRoleId( + @Param("userId") String userId, + @Param("roleIds") List roleIds, + @Param("resourceTag") String resourceTag); + + /** + * Delete Records Corresponding to the Resource + * + * @param resourceId + * @param resourceTag + * @return + */ + int deleteByResourceId( + @Param("resourceId") Long resourceId, @Param("resourceTag") String resourceTag); + + int deletePermission(PermissionDO record); + + /** + * Batch Query Asset Authorization Information + * + * @param resourceIds + * @param resourceTag + * @return + */ + List selectByResources( + @Param("resourceIds") List resourceIds, @Param("resourceTag") String resourceTag); + + /** + * select by userNo and resourceTag and roleIds + * + * @param userNo + * @param roleName + * @return + */ + List getResourceIdByUserNoAndRoleName( + @Param("userNo") 
String userNo, + @Param("resourceTag") String resourceTag, + @Param("roleName") String roleName); + + /** + * select by userNo and resourceTag and roleIds + * + * @param resourceIds + * @return + */ + List getByResourceIds( + @Param("resourceIds") List resourceIds, @Param("roleId") Long roleId); + + /** + * delete by resourceIds + * + * @param resourceIds + * @return + */ + int deleteByResourceIds(@Param("resourceIds") List resourceIds); + + /** + * select page + * + * @param userNo + * @param roleId + * @param resourceTag + * @return + */ + List selectLikeByUserNoAndRoleId( + @Param("userNo") String userNo, + @Param("roleId") Long roleId, + @Param("resourceId") Long resourceId, + @Param("resourceTag") String resourceTag, + @Param("start") int start, + @Param("size") int size); + + /** + * the count of selectLikeByUserNoAndRoleId + * + * @param userNo + * @param roleId + * @param resourceId + * @param resourceTag + * @return + */ + Integer selectLikeCountByUserNoAndRoleId( + @Param("userNo") String userNo, + @Param("roleId") Long roleId, + @Param("resourceId") Long resourceId, + @Param("resourceTag") String resourceTag); + + /** + * get by resourceIds and resourceTag + * + * @param resourceIds + * @param resourceTag + * @return + */ + List selectByResourceIdsAndResourceTag( + @Param("resourceIds") List resourceIds, @Param("resourceTag") String resourceTag); + + /** + * get user has permission permission + * + * @param resourceIds + * @param userNo + * @param roleId + * @param resourceTag + * @return + */ + List getPermissionByUserRolesAndId( + @Param("resourceIds") List resourceIds, + @Param("userNo") String userNo, + @Param("roleId") Long roleId, + @Param("resourceTag") String resourceTag); + + /** + * get by userNo and resourceTag + * + * @param userNo + * @param resourceTag + * @return + */ + List getPermissionByUserNoAndResourceTag( + @Param("userNo") String userNo, @Param("resourceTag") String resourceTag); +} diff --git 
a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/ProjectDOMapper.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/ProjectDOMapper.java index 3baf97b36..b651ef748 100644 --- a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/ProjectDOMapper.java +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/ProjectDOMapper.java @@ -13,6 +13,7 @@ package com.antgroup.openspg.server.infra.dao.mapper; +import com.antgroup.openspg.server.api.facade.dto.common.request.ProjectQueryRequest; import com.antgroup.openspg.server.infra.dao.dataobject.ProjectDO; import com.antgroup.openspg.server.infra.dao.dataobject.ProjectDOExample; import java.util.List; @@ -60,12 +61,12 @@ int updateByExample( int updateByPrimaryKey(ProjectDO record); List selectByCondition( - @Param("record") ProjectDO record, + @Param("record") ProjectQueryRequest record, @Param("orderByGmtCreateDesc") Boolean orderByGmtCreateDesc, @Param("start") int start, @Param("size") int size); long selectCountByCondition( - @Param("record") ProjectDO record, + @Param("record") ProjectQueryRequest record, @Param("orderByGmtCreateDesc") Boolean orderByGmtCreateDesc); } diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerInfoDOMapper.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerInfoDOMapper.java new file mode 100644 index 000000000..84e660d7c --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerInfoDOMapper.java @@ -0,0 +1,47 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.mapper; + +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInfoQuery; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerInfoDO; +import java.util.List; + +public interface SchedulerInfoDOMapper { + + /** insert Info */ + Long insert(SchedulerInfoDO record); + + /** update By Id */ + Long update(SchedulerInfoDO record); + + /** delete By id */ + int deleteById(Long id); + + /** get By id */ + SchedulerInfoDO getById(Long id); + + /** get By id */ + SchedulerInfoDO getByName(String name); + + /** query By Condition */ + List query(SchedulerInfoQuery record); + + int selectCountByQuery(SchedulerInfoQuery record); + + /** update Lock */ + int updateLock(Long id); + + /** update Unlock */ + int updateUnlock(Long id); +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerInstanceDOMapper.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerInstanceDOMapper.java new file mode 100644 index 000000000..8fece00f5 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerInstanceDOMapper.java @@ -0,0 +1,44 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.mapper; + +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerInstanceDO; +import java.util.List; + +public interface SchedulerInstanceDOMapper { + /** insert Instance */ + Long insert(SchedulerInstanceDO record); + + /** delete By JobId */ + int deleteByJobId(Long jobId); + + /** update */ + Long update(SchedulerInstanceDO record); + + /** get By id */ + SchedulerInstanceDO getById(Long id); + + /** get By instanceId */ + SchedulerInstanceDO getByUniqueId(String uniqueId); + + /** query By Condition */ + List query(SchedulerInstanceQuery record); + + /** query By Condition Count */ + int selectCountByQuery(SchedulerInstanceQuery record); + + /** get Not Finish Instance */ + List getNotFinishInstance(SchedulerInstanceQuery record); +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerJobDOMapper.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerJobDOMapper.java new file mode 100644 index 000000000..6c8ba3538 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerJobDOMapper.java @@ -0,0 +1,33 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.mapper; + +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerJobDO; +import java.util.List; + +public interface SchedulerJobDOMapper { + + Long insert(SchedulerJobDO record); + + int deleteById(Long id); + + Long update(SchedulerJobDO record); + + SchedulerJobDO getById(Long id); + + List query(SchedulerJobQuery record); + + int selectCountByQuery(SchedulerJobQuery record); +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerTaskDOMapper.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerTaskDOMapper.java new file mode 100644 index 000000000..ec18a9e39 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/mapper/SchedulerTaskDOMapper.java @@ -0,0 +1,55 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.mapper; + +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerTaskQuery; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerTaskDO; +import java.util.List; +import org.apache.ibatis.annotations.Param; + +public interface SchedulerTaskDOMapper { + + /** insert Task */ + Long insert(SchedulerTaskDO record); + + /** delete By jobId */ + int deleteByJobId(Long jobId); + + /** update By Id */ + Long update(SchedulerTaskDO record); + + /** get By id */ + SchedulerTaskDO getById(Long id); + + /** query By Condition */ + List query(SchedulerTaskQuery record); + + int selectCountByQuery(SchedulerTaskQuery record); + + /** query By InstanceId And nodeId */ + SchedulerTaskDO queryByInstanceIdAndNodeId( + @Param("instanceId") Long instanceId, @Param("nodeId") String nodeId); + + /** query By InstanceId */ + List queryByInstanceId(Long instanceId); + + /** set Status By InstanceId */ + int setStatusByInstanceId(@Param("instanceId") Long instanceId, @Param("status") String status); + + /** update Lock */ + int updateLock(Long id); + + /** update Unlock */ + int updateUnlock(Long id); +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/AccountRepositoryImpl.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/AccountRepositoryImpl.java new file mode 100644 index 000000000..f4eb6cb85 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/AccountRepositoryImpl.java @@ -0,0 +1,127 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.repository.common; + +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.account.Account; +import com.antgroup.openspg.server.common.service.account.AccountRepository; +import com.antgroup.openspg.server.infra.dao.dataobject.AccountDO; +import com.antgroup.openspg.server.infra.dao.mapper.AccountMapper; +import com.antgroup.openspg.server.infra.dao.repository.common.convertor.AccountConvertor; +import com.google.common.collect.Lists; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.commons.collections4.CollectionUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +@Repository +public class AccountRepositoryImpl implements AccountRepository { + + @Autowired private AccountMapper accountMapper; + + @Override + public Integer insert(Account account) { + AccountDO accountDO = AccountConvertor.toDO(account); + accountDO.setGmtCreate(new Date()); + accountDO.setGmtModified(new Date()); + return accountMapper.insert(accountDO); + } + + @Override + public Integer update(Account account) { + + return null; + } + + @Override + public Integer updateByUserNo(Account account) { + AccountDO accountDO = AccountConvertor.toDO(account); + accountDO.setGmtModified(new Date()); + return accountMapper.updateByUserNo(accountDO); + } + + @Override + public Integer deleteByUserNo(String userNo) { + return accountMapper.deleteByUserNo(userNo); + } + + @Override + public Account 
selectByUserNo(String userNo) { + AccountDO accountDO = accountMapper.getByUserId(userNo); + if (accountDO == null) { + return null; + } + return AccountConvertor.toModel(accountDO); + } + + @Override + public Account selectWithPrivateByUserNo(String userNo) { + AccountDO accountDO = accountMapper.getByUserId(userNo); + if (accountDO == null) { + return null; + } + return AccountConvertor.toModelWithPrivate(accountDO); + } + + @Override + public List query(String keyword) { + List list = accountMapper.getUserLikeUserNoOrDomainNoOrName(keyword); + if (CollectionUtils.isEmpty(list)) { + return Lists.newArrayList(); + } + return list.stream() + .map(accountDO -> AccountConvertor.toModel(accountDO)) + .collect(Collectors.toList()); + } + + @Override + public Paged getAccountList(String loginAccount, Integer start, Integer size) { + AccountDO accountDO = new AccountDO(); + accountDO.setDomainAccount(loginAccount); + Paged result = new Paged<>(); + result.setPageIdx(start); + result.setPageSize(size); + long count = accountMapper.selectCountByCondition(accountDO); + result.setTotal(count); + List list = new ArrayList<>(); + start = start > 0 ? 
start : 1; + int startPage = (start - 1) * size; + List accountDOS = accountMapper.selectByCondition(accountDO, startPage, size); + if (CollectionUtils.isNotEmpty(accountDOS)) { + list = accountDOS.stream().map(AccountConvertor::toModel).collect(Collectors.toList()); + } + result.setResults(list); + return result; + } + + @Override + public List getSimpleAccountByUserNoList(Collection userNos) { + List list = accountMapper.getSimpleAccountByUserNoList(userNos); + if (CollectionUtils.isEmpty(list)) { + return Lists.newArrayList(); + } + return list.stream() + .map(accountDO -> AccountConvertor.toModel(accountDO)) + .collect(Collectors.toList()); + } + + @Override + public int updateUserConfig(String userNo, String config) { + return accountMapper.updateUserConfig(userNo, config); + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/BuilderJobRepositoryImpl.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/BuilderJobRepositoryImpl.java new file mode 100644 index 000000000..cd52d73e4 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/BuilderJobRepositoryImpl.java @@ -0,0 +1,69 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.common; + +import com.antgroup.openspg.common.util.CommonUtils; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.bulider.BuilderJob; +import com.antgroup.openspg.server.common.model.bulider.BuilderJobQuery; +import com.antgroup.openspg.server.common.service.builder.BuilderJobRepository; +import com.antgroup.openspg.server.infra.dao.dataobject.BuilderJobDO; +import com.antgroup.openspg.server.infra.dao.mapper.BuilderJobDOMapper; +import com.antgroup.openspg.server.infra.dao.repository.common.convertor.BuilderJobConvertor; +import com.google.common.collect.Lists; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +@Repository +public class BuilderJobRepositoryImpl implements BuilderJobRepository { + + @Autowired private BuilderJobDOMapper builderJobDOMapper; + + @Override + public Long insert(BuilderJob record) { + BuilderJobDO jobDO = BuilderJobConvertor.toDO(record); + builderJobDOMapper.insert(jobDO); + record.setId(jobDO.getId()); + return jobDO.getId(); + } + + @Override + public int deleteById(Long id) { + return builderJobDOMapper.deleteById(id); + } + + @Override + public Long update(BuilderJob record) { + return builderJobDOMapper.update(BuilderJobConvertor.toDO(record)); + } + + @Override + public BuilderJob getById(Long id) { + return BuilderJobConvertor.toModel(builderJobDOMapper.getById(id)); + } + + @Override + public Paged query(BuilderJobQuery record) { + Paged pageData = new Paged(record.getPageSize(), record.getPageNo()); + int count = builderJobDOMapper.selectCountByQuery(record); + pageData.setTotal(Long.valueOf(count)); + if (count <= 0) { + pageData.setResults(Lists.newArrayList()); + return pageData; + } + CommonUtils.checkQueryPage(count, record.getPageNo(), record.getPageSize()); + 
pageData.setResults(BuilderJobConvertor.toModelList(builderJobDOMapper.query(record))); + return pageData; + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/ConfigRepositoryImpl.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/ConfigRepositoryImpl.java new file mode 100644 index 000000000..41221bb37 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/ConfigRepositoryImpl.java @@ -0,0 +1,64 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.common; + +import com.antgroup.openspg.server.common.model.config.Config; +import com.antgroup.openspg.server.common.service.config.ConfigRepository; +import com.antgroup.openspg.server.infra.dao.dataobject.ConfigDO; +import com.antgroup.openspg.server.infra.dao.mapper.ConfigMapper; +import com.antgroup.openspg.server.infra.dao.repository.common.convertor.ConfigConvertor; +import java.util.Date; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +@Repository +public class ConfigRepositoryImpl implements ConfigRepository { + + @Autowired private ConfigMapper configMapper; + + @Override + public Config query(String configId, String version) { + ConfigDO configDO = configMapper.selectByConfigIdAndVersion(configId, version); + return ConfigConvertor.toModel(configDO); + } + + @Override + public Config getById(Long id) { + ConfigDO configDO = configMapper.selectByPrimaryKey(id); + return ConfigConvertor.toModel(configDO); + } + + @Override + public Integer save(Config config) { + if (null == config) { + return 0; + } + ConfigDO configDO = ConfigConvertor.toDO(config); + configDO.setGmtCreate(new Date()); + configDO.setGmtModified(new Date()); + configDO.setStatus(1); + return configMapper.insert(configDO); + } + + @Override + public Integer update(Config config) { + if (null == config || null == config.getId()) { + return 0; + } + ConfigDO configDO = ConfigConvertor.toDO(config); + configDO.setGmtModified(new Date()); + configDO.setStatus(1); + return configMapper.updateByPrimaryKeySelective(configDO); + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/DataSourceRepositoryImpl.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/DataSourceRepositoryImpl.java new file mode 100644 index 000000000..bbe0b39d8 --- /dev/null +++ 
b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/DataSourceRepositoryImpl.java @@ -0,0 +1,75 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.repository.common; + +import com.antgroup.openspg.common.util.CommonUtils; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.datasource.DataSource; +import com.antgroup.openspg.server.common.model.datasource.DataSourceQuery; +import com.antgroup.openspg.server.common.service.datasource.DataSourceRepository; +import com.antgroup.openspg.server.infra.dao.dataobject.DataSourceDO; +import com.antgroup.openspg.server.infra.dao.mapper.DataSourceDOMapper; +import com.antgroup.openspg.server.infra.dao.repository.common.convertor.DataSourceConvertor; +import com.google.common.collect.Lists; +import java.util.List; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +@Repository +public class DataSourceRepositoryImpl implements DataSourceRepository { + + @Autowired private DataSourceDOMapper dataSourceDOMapper; + + @Override + public Long insert(DataSource record) { + DataSourceDO jobDO = DataSourceConvertor.toDO(record); + dataSourceDOMapper.insert(jobDO); + record.setId(jobDO.getId()); + return jobDO.getId(); + } + + @Override + public int deleteById(Long id) { + return dataSourceDOMapper.deleteById(id); + } + + @Override + public Long update(DataSource record) { + return 
dataSourceDOMapper.update(DataSourceConvertor.toDO(record)); + } + + @Override + public DataSource getById(Long id) { + return DataSourceConvertor.toModel(dataSourceDOMapper.getById(id)); + } + + @Override + public Paged query(DataSourceQuery record) { + Paged pageData = new Paged(record.getPageSize(), record.getPageNo()); + int count = dataSourceDOMapper.selectCountByQuery(record); + pageData.setTotal(Long.valueOf(count)); + if (count <= 0) { + pageData.setResults(Lists.newArrayList()); + return pageData; + } + CommonUtils.checkQueryPage(count, record.getPageNo(), record.getPageSize()); + pageData.setResults(DataSourceConvertor.toModelList(dataSourceDOMapper.query(record))); + return pageData; + } + + @Override + public List getGroupByType(DataSourceQuery record) { + return DataSourceConvertor.toModelList(dataSourceDOMapper.getGroupByType(record)); + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/PermissionRepositoryImpl.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/PermissionRepositoryImpl.java new file mode 100644 index 000000000..4e38ac4a4 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/PermissionRepositoryImpl.java @@ -0,0 +1,212 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.common; + +import com.antgroup.openspg.common.util.StringUtils; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.permission.Permission; +import com.antgroup.openspg.server.common.service.permission.PermissionRepository; +import com.antgroup.openspg.server.infra.dao.dataobject.PermissionDO; +import com.antgroup.openspg.server.infra.dao.mapper.PermissionMapper; +import com.antgroup.openspg.server.infra.dao.repository.common.convertor.PermissionConvertor; +import com.antgroup.openspgapp.common.util.enums.PermissionEnum; +import com.google.common.collect.Lists; +import java.util.Date; +import java.util.List; +import java.util.stream.Collectors; +import org.apache.commons.collections4.CollectionUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +@Repository +public class PermissionRepositoryImpl implements PermissionRepository { + + @Autowired private PermissionMapper permissionMapper; + + @Override + public Integer save(Permission permission) { + PermissionDO permissionDO = PermissionConvertor.toDO(permission); + if (null == permissionDO) { + return 0; + } + permissionDO.setGmtCreate(new Date()); + permissionDO.setGmtModified(new Date()); + permissionDO.setStatus("1"); + return permissionMapper.insert(permissionDO); + } + + @Override + public Integer update(Permission permission) { + PermissionDO permissionDO = PermissionConvertor.toDO(permission); + if (null == permissionDO) { + return 0; + } + permissionDO.setGmtModified(new Date()); + if (null != permissionDO.getId()) { + return permissionMapper.updateByPrimaryKeySelective(permissionDO); + } else { + return permissionMapper.updateResourceRole(permissionDO); + } + } + + @Override + public List query( + Long resourceId, String resourceTag, Integer page, Integer pageSize) { + return query(null, null, resourceId, resourceTag, page, 
pageSize); + } + + @Override + public List queryByUserNoAndRoleId( + String userNo, + Long roleId, + Long resourceId, + String resourceTag, + Integer page, + Integer pageSize) { + if (StringUtils.isBlank(userNo)) { + return Lists.newArrayList(); + } + return query(userNo, roleId, resourceId, resourceTag, page, pageSize); + } + + private List query( + String userNo, + Long roleId, + Long resourceId, + String resourceTag, + Integer page, + Integer pageSize) { + if (null == resourceId || StringUtils.isBlank(resourceTag)) { + return Lists.newArrayList(); + } + if (null == pageSize) { + pageSize = 10; + } + page = page > 0 ? page : 1; + int start = (page - 1) * pageSize; + List permissionList; + if (StringUtils.isBlank(userNo)) { + PermissionDO permissionDO = new PermissionDO(); + permissionDO.setResourceId(resourceId); + permissionDO.setResourceTag(resourceTag); + permissionList = permissionMapper.selectByCondition(permissionDO, start, pageSize); + } else { + permissionList = + permissionMapper.selectLikeByUserNoAndRoleId( + userNo, roleId, resourceId, resourceTag, start, pageSize); + } + if (CollectionUtils.isNotEmpty(permissionList)) { + return permissionList.stream().map(PermissionConvertor::toModel).collect(Collectors.toList()); + } + return Lists.newArrayList(); + } + + @Override + public Paged queryPage( + String userNo, Long roleId, Long resourceId, String resourceTag, Integer page, Integer size) { + Paged result = new Paged<>(); + result.setPageIdx(page); + result.setPageSize(size); + List permissionList; + PermissionDO permissionDO = new PermissionDO(); + permissionDO.setResourceTag(resourceTag); + permissionDO.setResourceId(resourceId); + permissionDO.setRoleId(roleId); + if (null == size) { + size = 10; + } + page = page > 0 ? 
page : 1; + int start = (page - 1) * size; + if (StringUtils.isNotBlank(userNo)) { + permissionDO.setUserNo(userNo); + result.setTotal( + (long) + permissionMapper.selectLikeCountByUserNoAndRoleId( + userNo, roleId, resourceId, resourceTag)); + permissionList = + permissionMapper.selectLikeByUserNoAndRoleId( + userNo, roleId, resourceId, resourceTag, start, size); + } else { + result.setTotal((long) permissionMapper.selectCountByCondition(permissionDO)); + permissionList = permissionMapper.selectByCondition(permissionDO, start, size); + } + + if (CollectionUtils.isNotEmpty(permissionList)) { + result.setResults( + permissionList.stream().map(PermissionConvertor::toModel).collect(Collectors.toList())); + } + return result; + } + + @Override + public Integer delete(Permission permission) { + int count = 0; + if (null != permission.getId()) { + count = permissionMapper.deleteByPrimaryKey(permission.getId()); + } else if (StringUtils.isNotBlank(permission.getUserNo()) + && StringUtils.isNotBlank(permission.getResourceTag()) + && null != permission.getResourceId()) { + count = permissionMapper.deletePermission(PermissionConvertor.toDO(permission)); + } + return count; + } + + @Override + public List selectByResourceIdsAndResourceTag( + List resourceIds, String resourceTag) { + List permissionList = + permissionMapper.selectByResourceIdsAndResourceTag(resourceIds, resourceTag); + if (CollectionUtils.isNotEmpty(permissionList)) { + return permissionList.stream().map(PermissionConvertor::toModel).collect(Collectors.toList()); + } + return Lists.newArrayList(); + } + + @Override + public List getPermissionByUserRolesAndId( + List resourceIds, String userNo, String roleType, String resourceTag) { + if (CollectionUtils.isEmpty(resourceIds) || StringUtils.isBlank(resourceTag)) { + return Lists.newArrayList(); + } + Long roleId = null; + if (StringUtils.isNotBlank(roleType)) { + roleId = PermissionEnum.valueOf(roleType).getId(); + } + List permissionList = + 
permissionMapper.getPermissionByUserRolesAndId(resourceIds, userNo, roleId, resourceTag); + if (CollectionUtils.isNotEmpty(permissionList)) { + return permissionList.stream().map(PermissionConvertor::toModel).collect(Collectors.toList()); + } + return Lists.newArrayList(); + } + + @Override + public List getPermissionByUserNoAndResourceTag(String userNo, String resourceTag) { + if (StringUtils.isBlank(userNo) || StringUtils.isBlank(resourceTag)) { + return Lists.newArrayList(); + } + List permissionList = + permissionMapper.getPermissionByUserNoAndResourceTag(userNo, resourceTag); + if (CollectionUtils.isNotEmpty(permissionList)) { + return permissionList.stream().map(PermissionConvertor::toModel).collect(Collectors.toList()); + } + return Lists.newArrayList(); + } + + @Override + public Permission selectByPrimaryKey(Long id) { + PermissionDO permissionDO = permissionMapper.selectByPrimaryKey(id); + return PermissionConvertor.toModel(permissionDO); + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/ProjectRepositoryImpl.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/ProjectRepositoryImpl.java index f9ffcdcb5..96fbd6a54 100644 --- a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/ProjectRepositoryImpl.java +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/ProjectRepositoryImpl.java @@ -23,6 +23,7 @@ import com.antgroup.openspg.server.infra.dao.dataobject.ProjectDOExample; import com.antgroup.openspg.server.infra.dao.mapper.ProjectDOMapper; import com.antgroup.openspg.server.infra.dao.repository.common.convertor.ProjectConvertor; +import com.antgroup.openspg.server.infra.dao.repository.schema.enums.ValidStatusEnum; import java.util.ArrayList; import java.util.Date; import java.util.List; @@ -107,26 +108,35 @@ public List query(ProjectQueryRequest request) { } @Override - public Paged 
queryPaged(ProjectQueryRequest request, int start, int size) { + public Paged queryPaged(ProjectQueryRequest query, int start, int size) { Paged result = new Paged<>(); result.setPageIdx(start); result.setPageSize(size); - ProjectDO projectDO = new ProjectDO(); - projectDO.setName(request.getName()); - projectDO.setBizDomainId(request.getTenantId()); - long count = - projectDOMapper.selectCountByCondition(projectDO, request.getOrderByGmtCreateDesc()); + long count = projectDOMapper.selectCountByCondition(query, query.getOrderByGmtCreateDesc()); result.setTotal(count); List list = new ArrayList<>(); start = start > 0 ? start : 1; int startPage = (start - 1) * size; List projectDOS = - projectDOMapper.selectByCondition( - projectDO, request.getOrderByGmtCreateDesc(), startPage, size); + projectDOMapper.selectByCondition(query, query.getOrderByGmtCreateDesc(), startPage, size); if (CollectionUtils.isNotEmpty(projectDOS)) { list = projectDOS.stream().map(ProjectConvertor::toModel).collect(Collectors.toList()); } result.setResults(list); return result; } + + @Override + public Project queryByNamespace(String namespace) { + ProjectDOExample example = new ProjectDOExample(); + ProjectDOExample.Criteria criteria = example.createCriteria(); + criteria.andNamespaceEqualTo(namespace); + criteria.andStatusEqualTo(ValidStatusEnum.VALID.name()); + List projectDOS = projectDOMapper.selectByExample(example); + if (CollectionUtils.isEmpty(projectDOS)) { + return null; + } + ProjectDO projectDO = projectDOS.get(0); + return ProjectConvertor.toModel(projectDO); + } } diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/AccountConvertor.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/AccountConvertor.java new file mode 100644 index 000000000..900209c0b --- /dev/null +++ 
b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/AccountConvertor.java @@ -0,0 +1,86 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.repository.common.convertor; + +import com.alibaba.fastjson.JSON; +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.common.util.StringUtils; +import com.antgroup.openspg.server.common.model.account.Account; +import com.antgroup.openspg.server.infra.dao.dataobject.AccountDO; +import com.antgroup.openspgapp.common.util.enums.LanguageEnum; +import com.antgroup.openspgapp.common.util.utils.SpgAppConstant; + +public class AccountConvertor { + + public static AccountDO toDO(Account account) { + if (null == account) { + return null; + } + AccountDO accountDO = new AccountDO(); + accountDO.setId(account.getId()); + accountDO.setUserNo(account.getWorkNo()); + accountDO.setToken(account.getToken()); + accountDO.setDomainAccount(account.getAccount()); + accountDO.setSalt(account.getSalt()); + accountDO.setRealName(account.getRealName()); + accountDO.setNickName(account.getNickName()); + accountDO.setDwAccessKey(account.getPassword()); + accountDO.setEmail(account.getEmail()); + return accountDO; + } + + public static Account toModel(AccountDO accountDO) { + return new Account( + accountDO.getId(), + accountDO.getUserNo(), + accountDO.getRealName(), + accountDO.getNickName(), + accountDO.getDomainAccount(), + accountDO.getEmail(), + accountDO.getGmtCreate(), + 
accountDO.getGmtModified(), + accountDO.getConfig(), + getUseCurrentLanguage(accountDO.getConfig())); + } + + public static Account toModelWithPrivate(AccountDO accountDO) { + Account account = new Account(); + account.setId(accountDO.getId()); + account.setWorkNo(accountDO.getUserNo()); + account.setToken(accountDO.getToken()); + account.setSalt(accountDO.getSalt()); + account.setRealName(accountDO.getRealName()); + account.setNickName(accountDO.getNickName()); + account.setAccount(accountDO.getDomainAccount()); + account.setPassword(accountDO.getDwAccessKey()); + account.setEmail(accountDO.getEmail()); + account.setGmtCreate(accountDO.getGmtCreate()); + account.setGmtModified(accountDO.getGmtModified()); + account.setUseCurrentLanguage(getUseCurrentLanguage(accountDO.getConfig())); + return account; + } + + public static String getUseCurrentLanguage(String config) { + if (StringUtils.isBlank(config)) { + return LanguageEnum.ZH.getCode(); + } + JSONObject jsonObject = JSON.parseObject(config); + String useCurrentLanguage = jsonObject.getString(SpgAppConstant.USE_CURRENT_LANGUAGE); + if (StringUtils.isNotBlank(useCurrentLanguage)) { + return useCurrentLanguage; + } else { + return LanguageEnum.ZH.getCode(); + } + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/BuilderJobConvertor.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/BuilderJobConvertor.java new file mode 100644 index 000000000..4942167c0 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/BuilderJobConvertor.java @@ -0,0 +1,62 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.repository.common.convertor; + +import com.antgroup.openspg.common.util.DozerBeanMapperUtil; +import com.antgroup.openspg.server.common.model.bulider.BuilderJob; +import com.antgroup.openspg.server.infra.dao.dataobject.BuilderJobDO; +import com.google.common.collect.Lists; +import java.util.List; + +public class BuilderJobConvertor { + + public static BuilderJobDO toDO(BuilderJob job) { + if (null == job) { + return null; + } + BuilderJobDO jobDO = DozerBeanMapperUtil.map(job, BuilderJobDO.class); + return jobDO; + } + + public static BuilderJob toModel(BuilderJobDO schedulerJobDO) { + if (null == schedulerJobDO) { + return null; + } + + BuilderJob job = DozerBeanMapperUtil.map(schedulerJobDO, BuilderJob.class); + return job; + } + + public static List toDoList(List jobs) { + if (jobs == null) { + return null; + } + List dos = Lists.newArrayList(); + for (BuilderJob job : jobs) { + dos.add(toDO(job)); + } + return dos; + } + + public static List toModelList(List schedulerJobDOs) { + if (schedulerJobDOs == null) { + return null; + } + List jobs = Lists.newArrayList(); + for (BuilderJobDO schedulerJobDO : schedulerJobDOs) { + jobs.add(toModel(schedulerJobDO)); + } + return jobs; + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/ConfigConvertor.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/ConfigConvertor.java new file mode 100644 index 000000000..fdd8bc44e --- /dev/null +++ 
b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/ConfigConvertor.java @@ -0,0 +1,47 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.repository.common.convertor; + +import com.antgroup.openspg.server.common.model.config.Config; +import com.antgroup.openspg.server.infra.dao.dataobject.ConfigDO; + +public class ConfigConvertor { + + public static Config toModel(ConfigDO configDO) { + if (null == configDO) { + return null; + } + return new Config( + configDO.getId(), + configDO.getConfigName(), + configDO.getConfigId(), + configDO.getConfig(), + configDO.getResourceType()); + } + + public static ConfigDO toDO(Config config) { + ConfigDO configDO = new ConfigDO(); + configDO.setId(config.getId()); + configDO.setUserNo(config.getUserNo()); + configDO.setProjectId(config.getProjectId()); + configDO.setConfigName(config.getConfigName()); + configDO.setConfigId(config.getConfigId()); + configDO.setConfig(config.getConfig()); + configDO.setVersion(config.getVersion()); + configDO.setDescription(config.getDescription()); + configDO.setResourceId(config.getResourceId()); + configDO.setResourceType(config.getResourceType()); + return configDO; + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/DataSourceConvertor.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/DataSourceConvertor.java new file mode 100644 index 
000000000..79e8627fc --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/DataSourceConvertor.java @@ -0,0 +1,72 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.repository.common.convertor; + +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.common.util.DozerBeanMapperUtil; +import com.antgroup.openspg.server.common.model.datasource.DataSource; +import com.antgroup.openspg.server.infra.dao.dataobject.DataSourceDO; +import com.google.common.collect.Lists; +import java.util.List; +import org.apache.commons.lang3.StringUtils; + +public class DataSourceConvertor { + + public static DataSourceDO toDO(DataSource dataSource) { + if (null == dataSource) { + return null; + } + DataSourceDO dataSourceDO = DozerBeanMapperUtil.map(dataSource, DataSourceDO.class); + if (dataSource.getConnectionInfo() != null) { + dataSourceDO.setConnectionInfo(JSONObject.toJSONString(dataSource.getConnectionInfo())); + } + return dataSourceDO; + } + + public static DataSource toModel(DataSourceDO dataSourceDO) { + if (null == dataSourceDO) { + return null; + } + + DataSource dataSource = DozerBeanMapperUtil.map(dataSourceDO, DataSource.class); + if (StringUtils.isNotBlank(dataSourceDO.getConnectionInfo())) { + dataSource.setConnectionInfo(JSONObject.parseObject(dataSourceDO.getConnectionInfo())); + } else { + dataSource.setConnectionInfo(new JSONObject()); + } + return dataSource; + } + + public static List 
toDoList(List dataSources) { + if (dataSources == null) { + return null; + } + List dos = Lists.newArrayList(); + for (DataSource dataSource : dataSources) { + dos.add(toDO(dataSource)); + } + return dos; + } + + public static List toModelList(List schedulerJobDOs) { + if (schedulerJobDOs == null) { + return null; + } + List dataSources = Lists.newArrayList(); + for (DataSourceDO schedulerJobDO : schedulerJobDOs) { + dataSources.add(toModel(schedulerJobDO)); + } + return dataSources; + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/PermissionConvertor.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/PermissionConvertor.java new file mode 100644 index 000000000..6643db5ee --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/common/convertor/PermissionConvertor.java @@ -0,0 +1,45 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.common.convertor; + +import com.antgroup.openspg.server.common.model.permission.Permission; +import com.antgroup.openspg.server.infra.dao.dataobject.PermissionDO; + +public class PermissionConvertor { + + public static PermissionDO toDO(Permission permission) { + if (null == permission) { + return null; + } + PermissionDO permissionDO = new PermissionDO(); + permissionDO.setId(permission.getId()); + permissionDO.setUserNo(permission.getUserNo()); + permissionDO.setResourceTag(permission.getResourceTag()); + permissionDO.setResourceId(permission.getResourceId()); + permissionDO.setRoleId(permission.getRoleId()); + return permissionDO; + } + + public static Permission toModel(PermissionDO permissionDO) { + if (null == permissionDO) { + return null; + } + return new Permission( + permissionDO.getId(), + permissionDO.getUserNo(), + permissionDO.getResourceId(), + permissionDO.getResourceTag(), + permissionDO.getRoleId()); + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerInfoRepositoryImpl.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerInfoRepositoryImpl.java new file mode 100644 index 000000000..da5f9ccc3 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerInfoRepositoryImpl.java @@ -0,0 +1,84 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.scheduler; + +import com.antgroup.openspg.common.util.CommonUtils; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInfoQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfo; +import com.antgroup.openspg.server.core.scheduler.service.repository.SchedulerInfoRepository; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerInfoDO; +import com.antgroup.openspg.server.infra.dao.mapper.SchedulerInfoDOMapper; +import com.antgroup.openspg.server.infra.dao.repository.scheduler.convertor.SchedulerInfoConvertor; +import com.google.common.collect.Lists; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +@Repository +public class SchedulerInfoRepositoryImpl implements SchedulerInfoRepository { + + @Autowired private SchedulerInfoDOMapper infoDOMapper; + + @Override + public Long insert(SchedulerInfo record) { + SchedulerInfoDO infoDO = SchedulerInfoConvertor.toDO(record); + infoDOMapper.insert(infoDO); + record.setId(infoDO.getId()); + return infoDO.getId(); + } + + @Override + public Long update(SchedulerInfo record) { + return infoDOMapper.update(SchedulerInfoConvertor.toDO(record)); + } + + @Override + public int deleteById(Long id) { + return infoDOMapper.deleteById(id); + } + + @Override + public SchedulerInfo getById(Long id) { + return SchedulerInfoConvertor.toModel(infoDOMapper.getById(id)); + } + + @Override + public SchedulerInfo getByName(String name) { + return SchedulerInfoConvertor.toModel(infoDOMapper.getByName(name)); + } + + @Override + public Paged query(SchedulerInfoQuery record) { + Paged pageData = new Paged(record.getPageSize(), record.getPageNo()); + int count = infoDOMapper.selectCountByQuery(record); + pageData.setTotal(Long.valueOf(count)); + if (count <= 0) { + 
pageData.setResults(Lists.newArrayList()); + return pageData; + } + CommonUtils.checkQueryPage(count, record.getPageNo(), record.getPageSize()); + pageData.setResults(SchedulerInfoConvertor.toModelList(infoDOMapper.query(record))); + return pageData; + } + + @Override + public int updateLock(Long id) { + return infoDOMapper.updateLock(id); + } + + @Override + public int updateUnlock(Long id) { + return infoDOMapper.updateUnlock(id); + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerInstanceRepositoryImpl.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerInstanceRepositoryImpl.java new file mode 100644 index 000000000..b0b0b1e60 --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerInstanceRepositoryImpl.java @@ -0,0 +1,80 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.scheduler; + +import com.antgroup.openspg.common.util.CommonUtils; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; +import com.antgroup.openspg.server.core.scheduler.service.repository.SchedulerInstanceRepository; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerInstanceDO; +import com.antgroup.openspg.server.infra.dao.mapper.SchedulerInstanceDOMapper; +import com.antgroup.openspg.server.infra.dao.repository.scheduler.convertor.SchedulerInstanceConvertor; +import com.google.common.collect.Lists; +import java.util.List; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +@Repository +public class SchedulerInstanceRepositoryImpl implements SchedulerInstanceRepository { + + @Autowired private SchedulerInstanceDOMapper instanceDOMapper; + + @Override + public Long insert(SchedulerInstance record) { + SchedulerInstanceDO instanceDO = SchedulerInstanceConvertor.toDO(record); + instanceDOMapper.insert(instanceDO); + record.setId(instanceDO.getId()); + return instanceDO.getId(); + } + + @Override + public int deleteByJobId(Long jobId) { + return instanceDOMapper.deleteByJobId(jobId); + } + + @Override + public Long update(SchedulerInstance record) { + return instanceDOMapper.update(SchedulerInstanceConvertor.toDO(record)); + } + + @Override + public SchedulerInstance getById(Long id) { + return SchedulerInstanceConvertor.toModel(instanceDOMapper.getById(id)); + } + + @Override + public SchedulerInstance getByUniqueId(String uniqueId) { + return SchedulerInstanceConvertor.toModel(instanceDOMapper.getByUniqueId(uniqueId)); + } + + @Override + public Paged query(SchedulerInstanceQuery record) { + Paged pageData = new Paged(record.getPageSize(), 
record.getPageNo()); + int count = instanceDOMapper.selectCountByQuery(record); + pageData.setTotal(Long.valueOf(count)); + if (count <= 0) { + pageData.setResults(Lists.newArrayList()); + return pageData; + } + CommonUtils.checkQueryPage(count, record.getPageNo(), record.getPageSize()); + pageData.setResults(SchedulerInstanceConvertor.toModelList(instanceDOMapper.query(record))); + return pageData; + } + + @Override + public List getNotFinishInstance(SchedulerInstanceQuery record) { + return SchedulerInstanceConvertor.toModelList(instanceDOMapper.getNotFinishInstance(record)); + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerJobRepositoryImpl.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerJobRepositoryImpl.java new file mode 100644 index 000000000..1550e7b2f --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerJobRepositoryImpl.java @@ -0,0 +1,69 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.scheduler; + +import com.antgroup.openspg.common.util.CommonUtils; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; +import com.antgroup.openspg.server.core.scheduler.service.repository.SchedulerJobRepository; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerJobDO; +import com.antgroup.openspg.server.infra.dao.mapper.SchedulerJobDOMapper; +import com.antgroup.openspg.server.infra.dao.repository.scheduler.convertor.SchedulerJobConvertor; +import com.google.common.collect.Lists; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +@Repository +public class SchedulerJobRepositoryImpl implements SchedulerJobRepository { + + @Autowired private SchedulerJobDOMapper schedulerJobDOMapper; + + @Override + public Long insert(SchedulerJob record) { + SchedulerJobDO jobDO = SchedulerJobConvertor.toDO(record); + schedulerJobDOMapper.insert(jobDO); + record.setId(jobDO.getId()); + return jobDO.getId(); + } + + @Override + public int deleteById(Long id) { + return schedulerJobDOMapper.deleteById(id); + } + + @Override + public Long update(SchedulerJob record) { + return schedulerJobDOMapper.update(SchedulerJobConvertor.toDO(record)); + } + + @Override + public SchedulerJob getById(Long id) { + return SchedulerJobConvertor.toModel(schedulerJobDOMapper.getById(id)); + } + + @Override + public Paged query(SchedulerJobQuery record) { + Paged pageData = new Paged(record.getPageSize(), record.getPageNo()); + int count = schedulerJobDOMapper.selectCountByQuery(record); + pageData.setTotal(Long.valueOf(count)); + if (count <= 0) { + pageData.setResults(Lists.newArrayList()); + return pageData; + } + CommonUtils.checkQueryPage(count, record.getPageNo(), record.getPageSize()); + 
pageData.setResults(SchedulerJobConvertor.toModelList(schedulerJobDOMapper.query(record))); + return pageData; + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerTaskRepositoryImpl.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerTaskRepositoryImpl.java new file mode 100644 index 000000000..aef1fc29a --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/SchedulerTaskRepositoryImpl.java @@ -0,0 +1,97 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.scheduler; + +import com.antgroup.openspg.common.util.CommonUtils; +import com.antgroup.openspg.server.api.facade.Paged; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerTaskQuery; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; +import com.antgroup.openspg.server.core.scheduler.service.repository.SchedulerTaskRepository; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerTaskDO; +import com.antgroup.openspg.server.infra.dao.mapper.SchedulerTaskDOMapper; +import com.antgroup.openspg.server.infra.dao.repository.scheduler.convertor.SchedulerTaskConvertor; +import com.google.common.collect.Lists; +import java.util.List; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Repository; + +@Repository +public class SchedulerTaskRepositoryImpl implements SchedulerTaskRepository { + + @Autowired private SchedulerTaskDOMapper taskDOMapper; + + @Override + public Long insert(SchedulerTask record) { + SchedulerTaskDO taskDO = SchedulerTaskConvertor.toDO(record); + taskDOMapper.insert(taskDO); + record.setId(taskDO.getId()); + return taskDO.getId(); + } + + @Override + public int deleteByJobId(Long jobId) { + return taskDOMapper.deleteByJobId(jobId); + } + + @Override + public Long update(SchedulerTask record) { + return taskDOMapper.update(SchedulerTaskConvertor.toDO(record)); + } + + @Override + public SchedulerTask getById(Long id) { + return SchedulerTaskConvertor.toModel(taskDOMapper.getById(id)); + } + + @Override + public Paged query(SchedulerTaskQuery record) { + Paged pageData = new Paged(record.getPageSize(), record.getPageNo()); + int count = taskDOMapper.selectCountByQuery(record); + pageData.setTotal(Long.valueOf(count)); + if (count <= 0) { + pageData.setResults(Lists.newArrayList()); + return pageData; + } + 
CommonUtils.checkQueryPage(count, record.getPageNo(), record.getPageSize()); + pageData.setResults(SchedulerTaskConvertor.toModelList(taskDOMapper.query(record))); + return pageData; + } + + @Override + public SchedulerTask queryByInstanceIdAndNodeId(Long instanceId, String nodeId) { + return SchedulerTaskConvertor.toModel( + taskDOMapper.queryByInstanceIdAndNodeId(instanceId, nodeId)); + } + + @Override + public List queryByInstanceId(Long instanceId) { + return SchedulerTaskConvertor.toModelList(taskDOMapper.queryByInstanceId(instanceId)); + } + + @Override + public int setStatusByInstanceId(Long instanceId, SchedulerEnum.TaskStatus status) { + return taskDOMapper.setStatusByInstanceId(instanceId, status.name()); + } + + @Override + public int updateLock(Long id) { + return taskDOMapper.updateLock(id); + } + + @Override + public int updateUnlock(Long id) { + return taskDOMapper.updateUnlock(id); + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerInfoConvertor.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerInfoConvertor.java new file mode 100644 index 000000000..654892f8c --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerInfoConvertor.java @@ -0,0 +1,79 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.scheduler.convertor; + +import com.alibaba.fastjson.JSONObject; +import com.alibaba.fastjson.TypeReference; +import com.antgroup.openspg.common.util.DozerBeanMapperUtil; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfo; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInfoLog; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerInfoDO; +import com.google.common.collect.Lists; +import java.util.List; +import org.apache.commons.lang3.StringUtils; + +public class SchedulerInfoConvertor { + + public static SchedulerInfoDO toDO(SchedulerInfo info) { + if (null == info) { + return null; + } + SchedulerInfoDO infoDO = DozerBeanMapperUtil.map(info, SchedulerInfoDO.class); + if (info.getConfig() != null) { + infoDO.setConfig(JSONObject.toJSONString(info.getConfig())); + } + if (info.getLog() != null) { + infoDO.setLog(JSONObject.toJSONString(info.getLog())); + } + return infoDO; + } + + public static SchedulerInfo toModel(SchedulerInfoDO schedulerInfoDO) { + if (null == schedulerInfoDO) { + return null; + } + SchedulerInfo info = DozerBeanMapperUtil.map(schedulerInfoDO, SchedulerInfo.class); + if (StringUtils.isNotBlank(schedulerInfoDO.getConfig())) { + info.setConfig(JSONObject.parseObject(schedulerInfoDO.getConfig())); + } + if (StringUtils.isNotBlank(schedulerInfoDO.getLog())) { + info.setLog( + JSONObject.parseObject( + schedulerInfoDO.getLog(), new TypeReference>() {})); + } + return info; + } + + public static List toDoList(List infos) { + if (infos == null) { + return null; + } + List dos = Lists.newArrayList(); + for (SchedulerInfo info : infos) { + dos.add(toDO(info)); + } + return dos; + } + + public static List toModelList(List schedulerInfoDOs) { + if (schedulerInfoDOs == null) { + return null; + } + List infos = Lists.newArrayList(); + for (SchedulerInfoDO schedulerInfoDO : schedulerInfoDOs) { + 
infos.add(toModel(schedulerInfoDO)); + } + return infos; + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerInstanceConvertor.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerInstanceConvertor.java new file mode 100644 index 000000000..6035cfbca --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerInstanceConvertor.java @@ -0,0 +1,80 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.scheduler.convertor; + +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.common.util.DozerBeanMapperUtil; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; +import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteDag; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerInstanceDO; +import com.google.common.collect.Lists; +import java.util.List; +import org.apache.commons.lang3.StringUtils; + +public class SchedulerInstanceConvertor { + + public static SchedulerInstanceDO toDO(SchedulerInstance instance) { + if (null == instance) { + return null; + } + SchedulerInstanceDO instanceDO = DozerBeanMapperUtil.map(instance, SchedulerInstanceDO.class); + if (instance.getExtension() != null) { + instanceDO.setExtension(JSONObject.toJSONString(instance.getExtension())); + } + if (instance.getTaskDag() != null) { + instanceDO.setTaskDag(JSONObject.toJSONString(instance.getTaskDag())); + } + return instanceDO; + } + + public static SchedulerInstance toModel(SchedulerInstanceDO schedulerInstanceDO) { + if (null == schedulerInstanceDO) { + return null; + } + + SchedulerInstance instance = + DozerBeanMapperUtil.map(schedulerInstanceDO, SchedulerInstance.class); + if (StringUtils.isNotBlank(schedulerInstanceDO.getExtension())) { + instance.setExtension(JSONObject.parseObject(schedulerInstanceDO.getExtension())); + } + if (StringUtils.isNotBlank(schedulerInstanceDO.getTaskDag())) { + instance.setTaskDag( + JSONObject.parseObject(schedulerInstanceDO.getTaskDag(), TaskExecuteDag.class)); + } + return instance; + } + + public static List toDoList(List instances) { + if (instances == null) { + return null; + } + List dos = Lists.newArrayList(); + for (SchedulerInstance instance : instances) { + dos.add(toDO(instance)); + } + return dos; + } + + public static List toModelList( + List schedulerInstanceDOs) { + if (schedulerInstanceDOs == 
null) { + return null; + } + List instances = Lists.newArrayList(); + for (SchedulerInstanceDO schedulerInstanceDO : schedulerInstanceDOs) { + instances.add(toModel(schedulerInstanceDO)); + } + return instances; + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerJobConvertor.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerJobConvertor.java new file mode 100644 index 000000000..93c965cba --- /dev/null +++ b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerJobConvertor.java @@ -0,0 +1,70 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. 
+ */ + +package com.antgroup.openspg.server.infra.dao.repository.scheduler.convertor; + +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.common.util.DozerBeanMapperUtil; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerJobDO; +import com.google.common.collect.Lists; +import java.util.List; +import org.apache.commons.lang3.StringUtils; + +public class SchedulerJobConvertor { + + public static SchedulerJobDO toDO(SchedulerJob job) { + if (null == job) { + return null; + } + SchedulerJobDO jobDO = DozerBeanMapperUtil.map(job, SchedulerJobDO.class); + if (job.getExtension() != null) { + jobDO.setExtension(JSONObject.toJSONString(job.getExtension())); + } + return jobDO; + } + + public static SchedulerJob toModel(SchedulerJobDO schedulerJobDO) { + if (null == schedulerJobDO) { + return null; + } + + SchedulerJob job = DozerBeanMapperUtil.map(schedulerJobDO, SchedulerJob.class); + if (StringUtils.isNotBlank(schedulerJobDO.getExtension())) { + job.setExtension(JSONObject.parseObject(schedulerJobDO.getExtension())); + } + return job; + } + + public static List toDoList(List jobs) { + if (jobs == null) { + return null; + } + List dos = Lists.newArrayList(); + for (SchedulerJob job : jobs) { + dos.add(toDO(job)); + } + return dos; + } + + public static List toModelList(List schedulerJobDOs) { + if (schedulerJobDOs == null) { + return null; + } + List jobs = Lists.newArrayList(); + for (SchedulerJobDO schedulerJobDO : schedulerJobDOs) { + jobs.add(toModel(schedulerJobDO)); + } + return jobs; + } +} diff --git a/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerTaskConvertor.java b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerTaskConvertor.java new file mode 100644 index 000000000..e5dae9969 --- /dev/null +++ 
b/server/infra/dao/src/main/java/com/antgroup/openspg/server/infra/dao/repository/scheduler/convertor/SchedulerTaskConvertor.java @@ -0,0 +1,70 @@ +/* + * Copyright 2023 OpenSPG Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. + */ + +package com.antgroup.openspg.server.infra.dao.repository.scheduler.convertor; + +import com.alibaba.fastjson.JSONObject; +import com.antgroup.openspg.common.util.DozerBeanMapperUtil; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; +import com.antgroup.openspg.server.infra.dao.dataobject.SchedulerTaskDO; +import com.google.common.collect.Lists; +import java.util.List; +import org.apache.commons.lang3.StringUtils; + +public class SchedulerTaskConvertor { + + public static SchedulerTaskDO toDO(SchedulerTask task) { + if (null == task) { + return null; + } + SchedulerTaskDO taskDO = DozerBeanMapperUtil.map(task, SchedulerTaskDO.class); + if (task.getExtension() != null) { + taskDO.setExtension(JSONObject.toJSONString(task.getExtension())); + } + return taskDO; + } + + public static SchedulerTask toModel(SchedulerTaskDO schedulerTaskDO) { + if (null == schedulerTaskDO) { + return null; + } + + SchedulerTask task = DozerBeanMapperUtil.map(schedulerTaskDO, SchedulerTask.class); + if (StringUtils.isNotBlank(schedulerTaskDO.getExtension())) { + task.setExtension(JSONObject.parseObject(schedulerTaskDO.getExtension())); + } + return task; + } + + public static List toDoList(List tasks) { + if (tasks == null) { + return null; + } + List dos = Lists.newArrayList(); + for (SchedulerTask task : tasks) { + 
dos.add(toDO(task)); + } + return dos; + } + + public static List toModelList(List schedulerTaskDOs) { + if (schedulerTaskDOs == null) { + return null; + } + List tasks = Lists.newArrayList(); + for (SchedulerTaskDO schedulerTaskDO : schedulerTaskDOs) { + tasks.add(toModel(schedulerTaskDO)); + } + return tasks; + } +} diff --git a/server/infra/dao/src/main/resources/mapper/AccountMapper.xml b/server/infra/dao/src/main/resources/mapper/AccountMapper.xml new file mode 100644 index 000000000..19010b6df --- /dev/null +++ b/server/infra/dao/src/main/resources/mapper/AccountMapper.xml @@ -0,0 +1,457 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, gmt_create, gmt_modified, user_no, token, last_token, salt, gmt_last_token_disable, + dw_access_id, dw_access_key, real_name, nick_name, email, domain_account, mobile, wx_account, config + + + + + + + + + + + + + + + + + + + delete from kg_user + where id = #{id,jdbcType=BIGINT} + + + + + + delete from kg_user where id in + + #{item} + + + + + + + SELECT LAST_INSERT_ID() + + insert into kg_user (gmt_create, gmt_modified, user_no, token, last_token, salt, gmt_last_token_disable, + dw_access_id, dw_access_key, real_name, nick_name, email, domain_account, mobile, wx_account) + values (#{gmtCreate,jdbcType=TIMESTAMP}, #{gmtModified,jdbcType=TIMESTAMP}, #{userNo,jdbcType=VARCHAR}, + #{token,jdbcType=VARCHAR}, #{lastToken,jdbcType=VARCHAR}, #{salt,jdbcType=VARCHAR}, + #{gmtLastTokenDisable,jdbcType=TIMESTAMP}, #{dwAccessId,jdbcType=VARCHAR}, #{dwAccessKey,jdbcType=VARCHAR}, + #{realName,jdbcType=VARCHAR}, #{nickName,jdbcType=VARCHAR}, #{email,jdbcType=VARCHAR}, + #{domainAccount,jdbcType=VARCHAR}, #{mobile,jdbcType=VARCHAR}, #{wxAccount,jdbcType=VARCHAR}) + + + + + update kg_user + + + gmt_create = #{gmtCreate,jdbcType=TIMESTAMP}, + + + gmt_modified = #{gmtModified,jdbcType=TIMESTAMP}, + + + user_no = #{userNo,jdbcType=VARCHAR}, + + + token = #{token,jdbcType=VARCHAR}, + + + last_token = 
#{lastToken,jdbcType=VARCHAR}, + + + salt = #{salt,jdbcType=VARCHAR}, + + + gmt_last_token_disable = #{gmtLastTokenDisable,jdbcType=TIMESTAMP}, + + + dw_access_id = #{dwAccessId,jdbcType=VARCHAR}, + + + dw_access_key = #{dwAccessKey,jdbcType=VARCHAR}, + + + real_name = #{realName,jdbcType=VARCHAR}, + + + nick_name = #{nickName,jdbcType=VARCHAR}, + + + email = #{email,jdbcType=VARCHAR}, + + + domain_account = #{domainAccount,jdbcType=VARCHAR}, + + + mobile = #{mobile,jdbcType=VARCHAR}, + + + wx_account = #{wxAccount,jdbcType=VARCHAR}, + + + where id = #{id,jdbcType=BIGINT} + + + + + update kg_user + + + gmt_create = #{gmtCreate,jdbcType=TIMESTAMP}, + + + gmt_modified = #{gmtModified,jdbcType=TIMESTAMP}, + + + token = #{token,jdbcType=VARCHAR}, + + + last_token = #{lastToken,jdbcType=VARCHAR}, + + + salt = #{salt,jdbcType=VARCHAR}, + + + gmt_last_token_disable = #{gmtLastTokenDisable,jdbcType=TIMESTAMP}, + + + dw_access_id = #{dwAccessId,jdbcType=VARCHAR}, + + + dw_access_key = #{dwAccessKey,jdbcType=VARCHAR}, + + + real_name = #{realName,jdbcType=VARCHAR}, + + + nick_name = #{nickName,jdbcType=VARCHAR}, + + + email = #{email,jdbcType=VARCHAR}, + + + domain_account = #{domainAccount,jdbcType=VARCHAR}, + + + mobile = #{mobile,jdbcType=VARCHAR}, + + + wx_account = #{wxAccount,jdbcType=VARCHAR}, + + + where user_no = #{userNo,jdbcType=VARCHAR} + + + + + + update kg_user + + + token = #{token}, + + + last_token = #{lastToken}, + + + salt = #{salt}, + + + dw_access_id = #{dwAccessId}, + + + dw_access_key = #{dwAccessKey}, + + gmt_last_token_disable = date_add(CURRENT_TIMESTAMP, interval 1 day), gmt_modified = CURRENT_TIMESTAMP + + where + user_no= #{userNo} + + + + update kg_user + set gmt_modified=CURRENT_TIMESTAMP,dw_access_id=#{dwAccessId},dw_access_key=#{dwAccessKey} + where + user_no= #{userNo} + + + + + + + + + + + + UPDATE kg_user + + + real_name = #{realName,jdbcType=VARCHAR}, + + + nick_name = #{nickName,jdbcType=VARCHAR}, + + + email = #{email,jdbcType=VARCHAR}, 
+ + + domain_account = #{domainAccount,jdbcType=VARCHAR}, + + + mobile = #{mobile,jdbcType=VARCHAR}, + + + wx_account = #{wxAccount,jdbcType=VARCHAR}, + + + gmt_modified = #{gmtModified,jdbcType=TIMESTAMP}, + + + where user_no = #{userNo} + + + + + + + + update kg_user + set gmt_modified=CURRENT_TIMESTAMP,config=#{config} + where + user_no= #{userNo} + + \ No newline at end of file diff --git a/server/infra/dao/src/main/resources/mapper/BuilderJobDOMapper.xml b/server/infra/dao/src/main/resources/mapper/BuilderJobDOMapper.xml new file mode 100644 index 000000000..f136e6be1 --- /dev/null +++ b/server/infra/dao/src/main/resources/mapper/BuilderJobDOMapper.xml @@ -0,0 +1,200 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + id, gmt_create, gmt_modified, create_user, modify_user, project_id, task_id, job_name, chunk_num, file_url, status, type, version, extension, computing_conf, life_cycle, action + + + + + SELECT LAST_INSERT_ID() + + insert into kg_builder_job (project_id, gmt_create, gmt_modified, + create_user, modify_user, task_id, + job_name, chunk_num, file_url, + status, type, version, extension, + computing_conf, life_cycle, action + ) + values (#{projectId,jdbcType=BIGINT}, #{gmtCreate,jdbcType=TIMESTAMP}, #{gmtModified,jdbcType=TIMESTAMP}, + #{createUser,jdbcType=VARCHAR}, #{modifyUser,jdbcType=VARCHAR}, #{taskId,jdbcType=BIGINT}, + #{jobName,jdbcType=VARCHAR}, #{chunkNum,jdbcType=BIGINT}, #{fileUrl,jdbcType=VARCHAR}, + #{status,jdbcType=VARCHAR}, #{type,jdbcType=VARCHAR}, #{version,jdbcType=VARCHAR}, #{extension,jdbcType=LONGVARCHAR}, + #{computingConf,jdbcType=LONGVARCHAR}, #{lifeCycle,jdbcType=VARCHAR}, #{action,jdbcType=VARCHAR} + ) + + + + delete from kg_builder_job where id = #{id,jdbcType=BIGINT} + + + + update kg_builder_job + + gmt_modified = CURRENT_TIMESTAMP, + + project_id = #{projectId,jdbcType=BIGINT}, + + + modify_user = #{modifyUser,jdbcType=VARCHAR}, + + + task_id = #{taskId,jdbcType=BIGINT}, + + + job_name = 
#{jobName,jdbcType=VARCHAR}, + + + chunk_num = #{chunkNum,jdbcType=BIGINT}, + + + file_url = #{fileUrl,jdbcType=VARCHAR}, + + + status = #{status,jdbcType=VARCHAR}, + + + type = #{type,jdbcType=VARCHAR}, + + + version = #{version,jdbcType=VARCHAR}, + + + extension = #{extension,jdbcType=LONGVARCHAR}, + + + computing_conf = #{computingConf,jdbcType=LONGVARCHAR}, + + + life_cycle = #{lifeCycle,jdbcType=VARCHAR}, + + + action = #{action,jdbcType=VARCHAR}, + + + where id = #{id,jdbcType=BIGINT} + + + + + + + and ( job_name like concat('%', #{keyword}, '%') or + file_url like concat('%', #{keyword}, '%') or + extension like concat('%', #{keyword}, '%') ) + + + create_user = #{createUser,jdbcType=VARCHAR} + + + and modify_user = #{modifyUser,jdbcType=VARCHAR} + + + and project_id = #{projectId,jdbcType=BIGINT} + + + and task_id = #{taskId,jdbcType=BIGINT} + + + and job_name like concat('%', #{jobName,jdbcType=VARCHAR}, '%') + + + and chunk_num = #{chunkNum,jdbcType=BIGINT} + + + and file_url like concat('%', #{fileUrl,jdbcType=VARCHAR}, '%') + + + and status = #{status,jdbcType=VARCHAR} + + + and type = #{type,jdbcType=VARCHAR} + + + and version = #{version,jdbcType=VARCHAR} + + + and extension like concat('%', #{extension,jdbcType=LONGVARCHAR}, '%') + + + and computing_conf like concat('%', #{computingConf,jdbcType=LONGVARCHAR}, '%') + + + and life_cycle = #{lifeCycle,jdbcType=VARCHAR} + + + and action = #{action,jdbcType=VARCHAR} + + + and gmt_create >= #{startCreateTime,jdbcType=TIMESTAMP} + + + and gmt_create <= #{endCreateTime,jdbcType=TIMESTAMP} + + + + + + + + \ No newline at end of file diff --git a/server/infra/dao/src/main/resources/mapper/ConfigMapper.xml b/server/infra/dao/src/main/resources/mapper/ConfigMapper.xml new file mode 100644 index 000000000..813b75a24 --- /dev/null +++ b/server/infra/dao/src/main/resources/mapper/ConfigMapper.xml @@ -0,0 +1,267 @@ + + + + + + + + + + + + + + + + + + + + + + + + + id, gmt_create, gmt_modified, user_no, project_id, 
config_name, config_id, version, + status, config, description, resource_id, resource_type + + + + + + + + + + + + + + + + + + + delete from kg_config + where id = #{id,jdbcType=BIGINT} + + + + + + delete from kg_config where id in + + #{item} + + + + + + + + SELECT LAST_INSERT_ID() + + insert into kg_config (gmt_create, gmt_modified, user_no, + project_id, config_name, config_id, + version, status, config, description, resource_id, resource_type + ) + values (#{gmtCreate,jdbcType=TIMESTAMP}, #{gmtModified,jdbcType=TIMESTAMP}, #{userNo,jdbcType=VARCHAR}, + #{projectId,jdbcType=VARCHAR}, #{configName,jdbcType=VARCHAR}, #{configId,jdbcType=VARCHAR}, + #{version,jdbcType=VARCHAR}, #{status,jdbcType=INTEGER}, #{config,jdbcType=LONGVARCHAR}, + #{description,jdbcType=LONGVARCHAR}, #{resourceId,jdbcType=VARCHAR}, #{resourceType,jdbcType=VARCHAR} + ) + + + + + + update kg_config + + + gmt_create = #{gmtCreate,jdbcType=TIMESTAMP}, + + + gmt_modified = #{gmtModified,jdbcType=TIMESTAMP}, + + + user_no = #{userNo,jdbcType=VARCHAR}, + + + project_id = #{projectId,jdbcType=VARCHAR}, + + + config_name = #{configName,jdbcType=VARCHAR}, + + + version = #{version,jdbcType=VARCHAR}, + + + status = #{status,jdbcType=INTEGER}, + + + config = #{config,jdbcType=LONGVARCHAR}, + + + description = #{description,jdbcType=VARCHAR}, + + + resource_id = #{resourceId,jdbcType=VARCHAR} + + + resource_type = #{resourceType,jdbcType=VARCHAR} + + + where id = #{id,jdbcType=BIGINT} + + + + \ No newline at end of file diff --git a/server/infra/dao/src/main/resources/mapper/DataSourceDOMapper.xml b/server/infra/dao/src/main/resources/mapper/DataSourceDOMapper.xml new file mode 100644 index 000000000..4a02d9261 --- /dev/null +++ b/server/infra/dao/src/main/resources/mapper/DataSourceDOMapper.xml @@ -0,0 +1,194 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + id, gmt_create, gmt_modified, create_user, update_user, status, remark, type, + db_name, db_url, db_user, db_password, db_password as 
encrypt, db_driver_name, + category, connection_info + + + + + SELECT LAST_INSERT_ID() + + insert into kg_data_source (gmt_create, gmt_modified, create_user, + update_user, status, remark, + type, db_name, db_url, + db_user, db_password, db_driver_name, + category, connection_info) + values (CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, #{createUser,jdbcType=VARCHAR}, + #{updateUser,jdbcType=VARCHAR}, #{status,jdbcType=VARCHAR}, #{remark,jdbcType=VARCHAR}, + #{type,jdbcType=VARCHAR}, #{dbName,jdbcType=VARCHAR}, #{dbUrl,jdbcType=VARCHAR}, + #{dbUser,jdbcType=VARCHAR}, #{dbPassword,jdbcType=VARCHAR}, #{dbDriverName,jdbcType=VARCHAR}, + #{category,jdbcType=VARCHAR}, #{connectionInfo,jdbcType=VARCHAR}) + + + + delete from kg_data_source where id = #{id,jdbcType=BIGINT} + + + + update kg_data_source + + gmt_modified = CURRENT_TIMESTAMP, + + create_user = #{createUser,jdbcType=VARCHAR}, + + + update_user = #{updateUser,jdbcType=VARCHAR}, + + + status = #{status,jdbcType=VARCHAR}, + + + remark = #{remark,jdbcType=VARCHAR}, + + + type = #{type,jdbcType=VARCHAR}, + + + db_name = #{dbName,jdbcType=VARCHAR}, + + + db_url = #{dbUrl,jdbcType=VARCHAR}, + + + db_user = #{dbUser,jdbcType=VARCHAR}, + + + db_password = #{dbPassword,jdbcType=VARCHAR}, + + + db_driver_name = #{dbDriverName,jdbcType=VARCHAR}, + + + category = #{category,jdbcType=VARCHAR}, + + + connection_info = #{connectionInfo,jdbcType=VARCHAR}, + + + where id = #{id,jdbcType=BIGINT} + + + + + + + + gmt_create = #{gmtCreate,jdbcType=TIMESTAMP} + + + and gmt_modified = #{gmtModified,jdbcType=TIMESTAMP} + + + and create_user = #{createUser,jdbcType=VARCHAR} + + + and update_user = #{updateUser,jdbcType=VARCHAR} + + + and status = #{status,jdbcType=VARCHAR} + + + and remark = #{remark,jdbcType=VARCHAR} + + + and type = #{type,jdbcType=VARCHAR} + + + and db_name like concat('%', #{dbName,jdbcType=VARCHAR},'%') + + + and db_url like concat('%', #{dbUrl,jdbcType=VARCHAR},'%') + + + and db_user = #{dbUser,jdbcType=VARCHAR} + + + 
and db_password = #{dbPassword,jdbcType=VARCHAR} + + + and db_driver_name = #{dbDriverName,jdbcType=VARCHAR} + + + and category = #{category,jdbcType=VARCHAR} + + + and gmt_create >= #{startCreateTime,jdbcType=TIMESTAMP} + + + and gmt_create <= #{endCreateTime,jdbcType=TIMESTAMP} + + + and connection_info like concat('%', #{connectionInfo,jdbcType=VARCHAR},'%') + + + + + + + + + + + \ No newline at end of file diff --git a/server/infra/dao/src/main/resources/mapper/PermissionMapper.xml b/server/infra/dao/src/main/resources/mapper/PermissionMapper.xml new file mode 100644 index 000000000..87542b0c2 --- /dev/null +++ b/server/infra/dao/src/main/resources/mapper/PermissionMapper.xml @@ -0,0 +1,436 @@ + + + + + + + + + + + + + + + + + + + + + id, gmt_create, gmt_modified, user_no, resource_id, role_id, resource_tag, status, + expire_date + + + + + + + + + + + + + + + + delete from kg_resource_permission + where id = #{id,jdbcType=BIGINT} + + + + + + delete from kg_resource_permission where id in + + #{item} + + + + + + + + SELECT LAST_INSERT_ID() + + insert into kg_resource_permission (gmt_create, gmt_modified, user_no, + resource_id, role_id, resource_tag, + status, expire_date) + values (#{gmtCreate,jdbcType=TIMESTAMP}, #{gmtModified,jdbcType=TIMESTAMP}, #{userNo,jdbcType=VARCHAR}, + #{resourceId,jdbcType=BIGINT}, #{roleId,jdbcType=BIGINT}, #{resourceTag,jdbcType=VARCHAR}, + #{status,jdbcType=VARCHAR}, #{expireDate,jdbcType=DATE}) + + + + + update kg_resource_permission + + + gmt_create = #{gmtCreate,jdbcType=TIMESTAMP}, + + + gmt_modified = #{gmtModified,jdbcType=TIMESTAMP}, + + + user_no = #{userNo,jdbcType=VARCHAR}, + + + resource_id = #{resourceId,jdbcType=BIGINT}, + + + role_id = #{roleId,jdbcType=BIGINT}, + + + resource_tag = #{resourceTag,jdbcType=VARCHAR}, + + + status = #{status,jdbcType=VARCHAR}, + + + expire_date = #{expireDate,jdbcType=DATE}, + + + where id = #{id,jdbcType=BIGINT} + + + + + insert into kg_resource_permission (gmt_create, gmt_modified, 
user_no, + resource_id, role_id, resource_tag, + status, expire_date) + values + + (#{item.gmtCreate,jdbcType=TIMESTAMP}, + #{item.gmtModified,jdbcType=TIMESTAMP}, + #{item.userNo,jdbcType=VARCHAR}, + #{item.resourceId,jdbcType=BIGINT}, + #{item.roleId,jdbcType=BIGINT}, + #{item.resourceTag,jdbcType=VARCHAR}, + #{item.status,jdbcType=VARCHAR}, + #{item.expireDate,jdbcType=DATE}) + + + + + update kg_resource_permission set + gmt_modified = #{record.gmtModified,jdbcType=TIMESTAMP}, + role_id = #{record.roleId,jdbcType=BIGINT} + where user_no = #{record.userNo,jdbcType=VARCHAR} + and resource_id = #{record.resourceId,jdbcType=VARCHAR} + and resource_tag = #{record.resourceTag,jdbcType=VARCHAR} + + + + + + + + + + delete from kg_resource_permission + where resource_id = #{resourceId} and resource_tag = #{resourceTag} + + + + delete from kg_resource_permission + where user_no = #{record.userNo,jdbcType=VARCHAR} and resource_id = #{record.resourceId,jdbcType=VARCHAR} + + + + delete from kg_resource_permission + where user_no = #{userNo} + and resource_id = #{resourceId} + and resource_tag = #{resourceTag} + + + + + + + + + + delete from kg_resource_permission + where resource_id in + + #{item} + + + + + + + + + + + + + \ No newline at end of file diff --git a/server/infra/dao/src/main/resources/mapper/ProjectDOMapper.xml b/server/infra/dao/src/main/resources/mapper/ProjectDOMapper.xml index 5717ee971..25e0466aa 100644 --- a/server/infra/dao/src/main/resources/mapper/ProjectDOMapper.xml +++ b/server/infra/dao/src/main/resources/mapper/ProjectDOMapper.xml @@ -113,25 +113,25 @@ DELETE FROM kg_ontology_entity WHERE name like concat(#{namespace}, '.%') ; - DELETE FROM kg_ontology_entity_property_range WHERE id = #{id,jdbcType=BIGINT}; + DELETE FROM kg_ontology_entity_property_range WHERE project_id = #{id,jdbcType=BIGINT}; - DELETE FROM kg_project_entity WHERE id = #{id,jdbcType=BIGINT}; + DELETE FROM kg_project_entity WHERE project_id = #{id,jdbcType=BIGINT}; - DELETE FROM 
kg_ontology_release WHERE id = #{id,jdbcType=BIGINT}; + DELETE FROM kg_ontology_release WHERE project_id = #{id,jdbcType=BIGINT}; - DELETE FROM kg_reason_session WHERE id = #{id,jdbcType=BIGINT}; + DELETE FROM kg_reason_session WHERE project_id = #{id,jdbcType=BIGINT}; - DELETE FROM kg_reason_task WHERE id = #{id,jdbcType=BIGINT}; + DELETE FROM kg_reason_task WHERE project_id = #{id,jdbcType=BIGINT}; - DELETE FROM kg_reason_tutorial WHERE id = #{id,jdbcType=BIGINT}; + DELETE FROM kg_reason_tutorial WHERE project_id = #{id,jdbcType=BIGINT}; - DELETE FROM kg_builder_job WHERE id = #{id,jdbcType=BIGINT}; + DELETE FROM kg_builder_job WHERE project_id = #{id,jdbcType=BIGINT}; delete from kg_project_info @@ -316,12 +316,19 @@ namespace like concat('%', #{record.namespace}, '%') and - - biz_domain_id = #{record.bizDomainId,jdbcType=BIGINT} and + + biz_domain_id = #{record.tenantId,jdbcType=BIGINT} and config like concat('%', #{record.config}, '%') and + + id in + + #{projectId,jdbcType=BIGINT} + + and + 1 = 1 @@ -329,14 +336,16 @@ - gmt_create DESC, + gmt_create DESC - gmt_create ASC, + gmt_create ASC - gmt_modified DESC + + gmt_modified DESC + select count(*) from kg_project_info where - \ No newline at end of file diff --git a/server/infra/dao/src/main/resources/mapper/SchedulerInfoDOMapper.xml b/server/infra/dao/src/main/resources/mapper/SchedulerInfoDOMapper.xml new file mode 100644 index 000000000..edb24cbe7 --- /dev/null +++ b/server/infra/dao/src/main/resources/mapper/SchedulerInfoDOMapper.xml @@ -0,0 +1,146 @@ + + + + + + + + + + + + + + + + + + + + id, gmt_create, gmt_modified, name, status, period, count, log, config, lock_time + + + + + SELECT LAST_INSERT_ID() + + insert into kg_scheduler_info (gmt_create, gmt_modified, name, status, period, count, log, config, lock_time) + values (CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, #{name,jdbcType=VARCHAR}, #{status,jdbcType=VARCHAR}, #{period,jdbcType=BIGINT}, + #{count,jdbcType=INTEGER}, #{log,jdbcType=LONGVARCHAR}, 
#{config,jdbcType=LONGVARCHAR}, #{lockTime,jdbcType=TIMESTAMP} ) + + + + delete from kg_scheduler_info where id = #{id,jdbcType=BIGINT} + + + + update kg_scheduler_info + + gmt_modified = CURRENT_TIMESTAMP, + + name = #{name,jdbcType=VARCHAR}, + + + status = #{status,jdbcType=VARCHAR}, + + + period = #{period,jdbcType=BIGINT}, + + + count = #{count,jdbcType=INTEGER}, + + + log = #{log,jdbcType=LONGVARCHAR}, + + + config = #{config,jdbcType=LONGVARCHAR}, + + + where id = #{id,jdbcType=BIGINT} + + + + + + + + + and name like concat('%', #{name,jdbcType=VARCHAR}, '%') + + + and status = #{status,jdbcType=VARCHAR} + + + and period = #{period,jdbcType=BIGINT} + + + and count = #{count,jdbcType=INTEGER} + + + and log like concat('%', #{log,jdbcType=LONGVARCHAR}, '%') + + + and config like concat('%', #{config,jdbcType=LONGVARCHAR}, '%') + + + and lock_time = #{lockTime,jdbcType=TIMESTAMP} + + + + + + + + + update kg_scheduler_info set lock_time = CURRENT_TIMESTAMP where id = #{id,jdbcType=BIGINT} and lock_time is NULL + + + + update kg_scheduler_info set lock_time = null where id = #{id,jdbcType=BIGINT} + + + \ No newline at end of file diff --git a/server/infra/dao/src/main/resources/mapper/SchedulerInstanceDOMapper.xml b/server/infra/dao/src/main/resources/mapper/SchedulerInstanceDOMapper.xml new file mode 100644 index 000000000..d9355dde5 --- /dev/null +++ b/server/infra/dao/src/main/resources/mapper/SchedulerInstanceDOMapper.xml @@ -0,0 +1,216 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, gmt_create, gmt_modified, create_user, project_id, job_id, unique_id, type, status, progress, begin_running_time, finish_time, life_cycle, dependence, scheduler_date, version, extension, task_dag + + + + + SELECT LAST_INSERT_ID() + + insert into kg_scheduler_instance (gmt_create, gmt_modified, create_user, project_id, job_id, unique_id, type, status, progress, + begin_running_time, finish_time, life_cycle, dependence, scheduler_date, version, extension, task_dag) + 
values (CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, #{createUser,jdbcType=VARCHAR}, #{projectId,jdbcType=BIGINT}, #{jobId,jdbcType=BIGINT}, + #{uniqueId,jdbcType=VARCHAR}, #{type,jdbcType=VARCHAR}, #{status,jdbcType=VARCHAR}, #{progress,jdbcType=BIGINT}, + #{beginRunningTime,jdbcType=TIMESTAMP}, #{finishTime,jdbcType=TIMESTAMP}, #{lifeCycle,jdbcType=VARCHAR}, #{dependence,jdbcType=VARCHAR}, + #{schedulerDate,jdbcType=TIMESTAMP}, #{version,jdbcType=VARCHAR}, #{extension,jdbcType=LONGVARCHAR}, #{taskDag,jdbcType=LONGVARCHAR} ) + + + + delete from kg_scheduler_instance where job_id = #{jobId,jdbcType=BIGINT}; + + + + delete from kg_scheduler_instance where id = #{id,jdbcType=BIGINT}; + + + + update kg_scheduler_instance + + gmt_modified = CURRENT_TIMESTAMP, + + project_id = #{projectId,jdbcType=BIGINT}, + + + job_id = #{jobId,jdbcType=BIGINT}, + + + type = #{type,jdbcType=VARCHAR}, + + + status = #{status,jdbcType=VARCHAR}, + + + progress = #{progress,jdbcType=BIGINT}, + + + begin_running_time = #{beginRunningTime,jdbcType=TIMESTAMP}, + + + finish_time = #{finishTime,jdbcType=TIMESTAMP}, + + + life_cycle = #{lifeCycle,jdbcType=VARCHAR}, + + + dependence = #{dependence,jdbcType=VARCHAR}, + + + scheduler_date = #{schedulerDate,jdbcType=TIMESTAMP}, + + + version = #{version,jdbcType=VARCHAR}, + + + extension = #{extension,jdbcType=LONGVARCHAR}, + + + task_dag = #{taskDag,jdbcType=LONGVARCHAR}, + + + where id = #{id,jdbcType=BIGINT} + + + + + + + + + create_user = #{createUser,jdbcType=VARCHAR} + + + and project_id = #{projectId,jdbcType=BIGINT} + + + and job_id = #{jobId,jdbcType=BIGINT} + + + and unique_id = #{uniqueId,jdbcType=VARCHAR} + + + and type = #{type,jdbcType=VARCHAR} + + + and status = #{status,jdbcType=VARCHAR} + + + and progress = #{progress,jdbcType=BIGINT} + + + and life_cycle = #{lifeCycle,jdbcType=VARCHAR} + + + and dependence = #{dependence,jdbcType=VARCHAR} + + + and version = #{version,jdbcType=VARCHAR} + + + and extension like concat('%', 
#{extension,jdbcType=VARCHAR}, '%') + + + and task_dag like concat('%', #{taskDag,jdbcType=VARCHAR}, '%') + + + and gmt_create >= #{startCreateTime,jdbcType=TIMESTAMP} + + + and gmt_create <= #{endCreateTime,jdbcType=TIMESTAMP} + + + + + + + + + + \ No newline at end of file diff --git a/server/infra/dao/src/main/resources/mapper/SchedulerJobDOMapper.xml b/server/infra/dao/src/main/resources/mapper/SchedulerJobDOMapper.xml new file mode 100644 index 000000000..cd89bc633 --- /dev/null +++ b/server/infra/dao/src/main/resources/mapper/SchedulerJobDOMapper.xml @@ -0,0 +1,179 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + id, gmt_create, gmt_modified, create_user, modify_user, project_id, name, life_cycle, translate_type, status, dependence, scheduler_cron, last_execute_time, invoker_id, version, extension + + + + + SELECT LAST_INSERT_ID() + + insert into kg_scheduler_job (gmt_create, gmt_modified, create_user, modify_user, project_id, name, life_cycle, translate_type, status, + dependence, scheduler_cron, last_execute_time, invoker_id, version, extension) + values (CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, #{createUser,jdbcType=VARCHAR}, #{modifyUser,jdbcType=VARCHAR}, #{projectId,jdbcType=BIGINT}, + #{name,jdbcType=VARCHAR}, #{lifeCycle,jdbcType=VARCHAR}, #{translateType,jdbcType=VARCHAR}, #{status,jdbcType=VARCHAR}, + #{dependence,jdbcType=VARCHAR}, #{schedulerCron,jdbcType=VARCHAR}, #{lastExecuteTime,jdbcType=TIMESTAMP}, #{invokerId,jdbcType=VARCHAR}, + #{version,jdbcType=VARCHAR}, #{extension,jdbcType=LONGVARCHAR} ) + + + + delete from kg_scheduler_job where id = #{id,jdbcType=BIGINT}; + + + + update kg_scheduler_job + + gmt_modified = CURRENT_TIMESTAMP, + + modify_user = #{modifyUser,jdbcType=VARCHAR}, + + + project_id = #{projectId,jdbcType=BIGINT}, + + + name = #{name,jdbcType=VARCHAR}, + + + life_cycle = #{lifeCycle,jdbcType=VARCHAR}, + + + translate_type = #{translateType,jdbcType=VARCHAR}, + + + status = #{status,jdbcType=VARCHAR}, + + + dependence = 
#{dependence,jdbcType=VARCHAR}, + + + scheduler_cron = #{schedulerCron,jdbcType=VARCHAR}, + + + last_execute_time = #{lastExecuteTime,jdbcType=TIMESTAMP}, + + + invoker_id = #{invokerId,jdbcType=VARCHAR}, + + + version = #{version,jdbcType=VARCHAR}, + + + extension = #{extension,jdbcType=LONGVARCHAR}, + + + where id = #{id,jdbcType=BIGINT} + + + + + + + create_user = #{createUser,jdbcType=VARCHAR} + + + and modify_user = #{modifyUser,jdbcType=VARCHAR} + + + and project_id = #{projectId,jdbcType=BIGINT} + + + and name like concat('%', #{name,jdbcType=VARCHAR}, '%') + + + and life_cycle = #{lifeCycle,jdbcType=VARCHAR} + + + and translate_type = #{translateType,jdbcType=VARCHAR} + + + and status = #{status,jdbcType=VARCHAR} + + + and dependence = #{dependence,jdbcType=VARCHAR} + + + and scheduler_cron = #{schedulerCron,jdbcType=VARCHAR} + + + and invoker_id = #{invokerId,jdbcType=VARCHAR} + + + and version = #{version,jdbcType=VARCHAR} + + + and extension like concat('%', #{extension,jdbcType=LONGVARCHAR}, '%') + + + and gmt_create >= #{startCreateTime,jdbcType=TIMESTAMP} + + + and gmt_create <= #{endCreateTime,jdbcType=TIMESTAMP} + + + + + + + + \ No newline at end of file diff --git a/server/infra/dao/src/main/resources/mapper/SchedulerTaskDOMapper.xml b/server/infra/dao/src/main/resources/mapper/SchedulerTaskDOMapper.xml new file mode 100644 index 000000000..485b96656 --- /dev/null +++ b/server/infra/dao/src/main/resources/mapper/SchedulerTaskDOMapper.xml @@ -0,0 +1,226 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, gmt_create, gmt_modified, project_id, job_id, instance_id, type, title, status, execute_num, begin_time, finish_time, estimate_finish_time, trace_log, lock_time, resource, input, output, node_id, extension + + + + + SELECT LAST_INSERT_ID() + + insert into kg_scheduler_task (gmt_create, gmt_modified, project_id, job_id, instance_id, type, title, status, execute_num, + begin_time, finish_time, estimate_finish_time, trace_log, 
lock_time, resource, input, output, node_id, extension) + values (CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, #{projectId,jdbcType=BIGINT}, #{jobId,jdbcType=BIGINT}, #{instanceId,jdbcType=BIGINT}, + #{type,jdbcType=VARCHAR}, #{title,jdbcType=VARCHAR}, #{status,jdbcType=VARCHAR}, #{executeNum,jdbcType=INTEGER}, #{beginTime,jdbcType=TIMESTAMP}, + #{finishTime,jdbcType=TIMESTAMP}, #{estimateFinishTime,jdbcType=TIMESTAMP}, #{traceLog,jdbcType=LONGVARCHAR}, #{lockTime,jdbcType=TIMESTAMP}, + #{resource,jdbcType=VARCHAR}, #{input,jdbcType=LONGVARCHAR}, #{output,jdbcType=LONGVARCHAR}, #{nodeId,jdbcType=VARCHAR}, #{extension,jdbcType=LONGVARCHAR} ) + + + + delete from kg_scheduler_task where job_id = #{jobId,jdbcType=BIGINT} + + + + delete from kg_scheduler_task where id = #{id,jdbcType=BIGINT} + + + + update kg_scheduler_task + + gmt_modified = CURRENT_TIMESTAMP, + + project_id = #{projectId,jdbcType=BIGINT}, + + + job_id = #{jobId,jdbcType=BIGINT}, + + + instance_id = #{instanceId,jdbcType=BIGINT}, + + + type = #{type,jdbcType=VARCHAR}, + + + title = #{title,jdbcType=VARCHAR}, + + + status = #{status,jdbcType=VARCHAR}, + + + execute_num = #{executeNum,jdbcType=INTEGER}, + + + begin_time = #{beginTime,jdbcType=TIMESTAMP}, + + + finish_time = #{finishTime,jdbcType=TIMESTAMP}, + + + estimate_finish_time = #{estimateFinishTime,jdbcType=TIMESTAMP}, + + + trace_log = #{traceLog,jdbcType=LONGVARCHAR}, + + + lock_time = #{lockTime,jdbcType=TIMESTAMP}, + + + resource = #{resource,jdbcType=VARCHAR}, + + + input = #{input,jdbcType=LONGVARCHAR}, + + + output = #{output,jdbcType=LONGVARCHAR}, + + + node_id = #{nodeId,jdbcType=VARCHAR}, + + + extension = #{extension,jdbcType=LONGVARCHAR}, + + + where id = #{id,jdbcType=BIGINT} + + + + + + + and project_id = #{projectId,jdbcType=BIGINT} + + + and job_id = #{jobId,jdbcType=BIGINT} + + + and instance_id = #{instanceId,jdbcType=BIGINT} + + + and type = #{type,jdbcType=VARCHAR} + + + and title like concat('%', #{title,jdbcType=VARCHAR}, '%') + + 
+ and status = #{status,jdbcType=VARCHAR} + + + and execute_num = #{executeNum,jdbcType=INTEGER} + + + and lock_time = #{lockTime,jdbcType=TIMESTAMP} + + + and resource like concat('%', #{resource,jdbcType=VARCHAR}, '%') + + + and node_id = #{nodeId,jdbcType=VARCHAR} + + + and extension like concat('%', #{extension,jdbcType=VARCHAR}, '%') + + + and gmt_create >= #{startCreateTime,jdbcType=TIMESTAMP} + + + and gmt_create <= #{endCreateTime,jdbcType=TIMESTAMP} + + + + + + + + + + + + + update kg_scheduler_task + set gmt_modified = CURRENT_TIMESTAMP, + status = #{status} + where instance_id = #{instanceId} + and status != 'FINISH' + + + + update kg_scheduler_task set lock_time = CURRENT_TIMESTAMP where id = #{id,jdbcType=BIGINT} and lock_time is NULL + + + + update kg_scheduler_task set lock_time = null where id = #{id,jdbcType=BIGINT} + + + \ No newline at end of file diff --git a/server/infra/dao/src/main/resources/mybatis-generator-config.xml b/server/infra/dao/src/main/resources/mybatis-generator-config.xml index 7f68772ec..f35cf4ec8 100644 --- a/server/infra/dao/src/main/resources/mybatis-generator-config.xml +++ b/server/infra/dao/src/main/resources/mybatis-generator-config.xml @@ -84,5 +84,21 @@
+ + + +
+ + + +
+ + + +
+ + + +
diff --git a/server/pom.xml b/server/pom.xml index 08185293d..526fc8b28 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -155,6 +155,11 @@ test ${project.version}
+ + com.antgroup.openspgapp + common-util + ${project.version} + diff --git a/server/test/src/test/java/com/antgroup/openspg/test/kgschema/SPGSchemaFacadeTest.groovy b/server/test/src/test/java/com/antgroup/openspg/test/kgschema/SPGSchemaFacadeTest.groovy deleted file mode 100644 index 51f3ccb99..000000000 --- a/server/test/src/test/java/com/antgroup/openspg/test/kgschema/SPGSchemaFacadeTest.groovy +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright 2023 OpenSPG Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except - * in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License - * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express - * or implied. - */ - - -package com.antgroup.openspg.test.kgschema - -import com.antgroup.openspg.server.api.facade.ApiResponse -import com.antgroup.openspg.server.api.facade.dto.schema.request.BuiltInPropertyRequest -import com.antgroup.openspg.server.api.facade.dto.schema.request.ConceptRequest -import com.antgroup.openspg.server.api.facade.dto.schema.request.ProjectSchemaRequest -import com.antgroup.openspg.server.api.facade.dto.schema.request.RelationRequest -import com.antgroup.openspg.server.api.facade.dto.schema.request.SPGTypeRequest -import com.antgroup.openspg.server.api.facade.dto.schema.request.SchemaAlterRequest -import com.antgroup.openspg.server.api.http.client.HttpConceptFacade -import com.antgroup.openspg.server.api.http.client.HttpSchemaFacade -import com.antgroup.openspg.server.api.http.client.util.ConnectionInfo -import com.antgroup.openspg.server.api.http.client.util.HttpClientBootstrap -import com.antgroup.openspg.cloudext.impl.graphstore.tugraph.TuGraphStoreClient -import com.antgroup.openspg.server.common.service.datasource.DataSourceService 
-import com.antgroup.openspg.core.schema.model.alter.SchemaDraft -import com.antgroup.openspg.core.schema.model.predicate.Property -import com.antgroup.openspg.core.schema.model.predicate.Relation -import com.antgroup.openspg.core.schema.model.semantic.request.DefineDynamicTaxonomyRequest -import com.antgroup.openspg.core.schema.model.semantic.request.DefineLogicalCausationRequest -import com.antgroup.openspg.core.schema.model.semantic.request.RemoveDynamicTaxonomyRequest -import com.antgroup.openspg.core.schema.model.semantic.request.RemoveLogicalCausationRequest -import com.antgroup.openspg.core.schema.model.type.BaseSPGType -import com.antgroup.openspg.core.schema.model.type.ConceptList -import com.antgroup.openspg.core.schema.model.type.ProjectSchema -import com.antgroup.openspg.core.schema.model.type.SPGTypeEnum -import com.antgroup.openspg.test.sofaboot.SofaBootTestApplication -import org.mockito.Mockito -import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase -import org.springframework.boot.test.context.SpringBootTest -import org.springframework.boot.test.mock.mockito.MockBean -import spock.lang.Shared -import spock.lang.Specification - -import static org.junit.jupiter.api.Assertions.assertEquals -import static org.junit.jupiter.api.Assertions.assertNotNull - -@SpringBootTest(classes = SofaBootTestApplication, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT) -@AutoConfigureTestDatabase -class SPGSchemaFacadeTest extends Specification { - @Shared - Long projectId = 1L - @Shared - spgSchemaFacade = new HttpSchemaFacade() - @Shared - conceptFacade = new HttpConceptFacade() - - @MockBean - private DataSourceService dataSourceService - - def setupSpec() { - HttpClientBootstrap.init(new ConnectionInfo("http://127.0.0.1:8887") - .setConnectTimeout(60000).setReadTimeout(60000) - ) - } - - /** - * step 1: query project schema, check system built-in BasicType and Standard is inited; - * step 2: create new 
StandardType、EntityType、ConceptType、EventType; - * step 3: define taxonomy semantic and logic causation semantic on concept; - * step 4: update or delete some StandardType、EntityType、ConceptType、EventType; - * step 5: query a single spg type, query a single relation; - * step 6: delete all customized StandardType、EntityType、ConceptType、EventType; - */ - def "test"() { - given: - Mockito.doReturn(Mock(TuGraphStoreClient.class)) - .when(dataSourceService) - .buildSharedKgStoreClient() - Mockito.doReturn(Mock(ElasticSearchEngineClient.class)) - .when(dataSourceService) - .buildSharedSearchEngineClient() - - when: - // step 1 - ProjectSchema projectSchema = this.getProjectSchema() - MockSchemaResultValidator.checkInitResult(projectSchema.getSpgTypes()) - - BuiltInPropertyRequest builtInPropertyRequest = new BuiltInPropertyRequest( - spgTypeEnum: SPGTypeEnum.CONCEPT_TYPE.name() - ) - ApiResponse> apiResponse = spgSchemaFacade.queryBuiltInProperty(builtInPropertyRequest) - assertEquals(2, apiResponse.getData().size()) - - // step 2 - SchemaDraft createDraft = MockSchemaDraftFactory.buildCreateDraft() - SchemaAlterRequest schemaAlterRequest = new SchemaAlterRequest( - projectId: projectId, schemaDraft: createDraft) - spgSchemaFacade.alterSchema(schemaAlterRequest) - - projectSchema = this.getProjectSchema() - MockSchemaResultValidator.checkCreateResult(projectSchema.getSpgTypes()) - - //step 3 - DefineDynamicTaxonomyRequest defineDynamicTaxonomyRequest1 = new DefineDynamicTaxonomyRequest( - conceptTypeName: MockSpgTypeNameEnum.DEFAULT_TAXOMOMY_OF_PERSON.getName(), - conceptName: "中产阶级", - dsl: "Define (s:DEFAULT.Person)-[p:belongTo]->(o:`DEFAULT.TaxonomyOfPerson`/`中产阶级`) " + - "{GraphStructure{} Rule{ R1: s.age >= 40 and s.age < 50}}") - conceptFacade.defineDynamicTaxonomy(defineDynamicTaxonomyRequest1) - - DefineDynamicTaxonomyRequest defineDynamicTaxonomyRequest2 = new DefineDynamicTaxonomyRequest( - conceptTypeName: 
MockSpgTypeNameEnum.DEFAULT_TAXOMOMY_OF_PERSON.getName(), - conceptName: "资产阶级", - dsl: "Define (s:DEFAULT.Person)-[p:belongTo]->" + - "(o:`DEFAULT.TaxonomyOfPerson`/`资产阶级`) " + - "{GraphStructure{} Rule{ R1: s.age >= 50}}") - conceptFacade.defineDynamicTaxonomy(defineDynamicTaxonomyRequest2) - - DefineLogicalCausationRequest defineLogicalCausationRequest = new DefineLogicalCausationRequest( - subjectConceptTypeName: MockSpgTypeNameEnum.DEFAULT_TAXOMOMY_OF_PERSON.getName(), - subjectConceptName: "中产阶级", - objectConceptTypeName: MockSpgTypeNameEnum.DEFAULT_TAXOMOMY_OF_PERSON.getName(), - objectConceptName: "资产阶级", - predicateName: "leadTo", - dsl: "Define (s:`DEFAULT.TaxonomyOfPerson`/`中产阶级`)-[p:leadTo]->" + - "(o:`DEFAULT.TaxonomyOfPerson`/`资产阶级`) " + - "{GraphStructure{} Rule{ R1: s.age=50} \n Action {}}") - conceptFacade.defineLogicalCausation(defineLogicalCausationRequest) - - ConceptRequest conceptRequest = new ConceptRequest( - conceptTypeName: MockSpgTypeNameEnum.DEFAULT_TAXOMOMY_OF_PERSON.getName(), - conceptName: "中产阶级" - ) - ApiResponse conceptResponse = conceptFacade.queryConcept(conceptRequest) - assertEquals(1, conceptResponse.getData().getConcepts().size()) - assertEquals(2, conceptResponse.getData().getConcepts().get(0).getSemantics().size()) - - RemoveDynamicTaxonomyRequest removeDynamicTaxonomyRequest = new RemoveDynamicTaxonomyRequest( - objectConceptTypeName: MockSpgTypeNameEnum.DEFAULT_TAXOMOMY_OF_PERSON.getName()) - conceptFacade.removeDynamicTaxonomy(removeDynamicTaxonomyRequest) - - RemoveLogicalCausationRequest removeLogicalCausationRequest = new RemoveLogicalCausationRequest( - subjectConceptTypeName: MockSpgTypeNameEnum.DEFAULT_TAXOMOMY_OF_PERSON.getName(), - subjectConceptName: "中产阶级", - objectConceptTypeName: MockSpgTypeNameEnum.DEFAULT_TAXOMOMY_OF_PERSON.getName(), - objectConceptName: "资产阶级", - predicateName: "leadTo") - conceptFacade.removeLogicalCausation(removeLogicalCausationRequest) - - conceptResponse = 
conceptFacade.queryConcept(conceptRequest) - assertEquals(0, conceptResponse.getData().getConcepts().size()) - - // step 4 - SchemaDraft updateDraft = MockSchemaDraftFactory.buildUpdateDraft(projectSchema) - schemaAlterRequest = new SchemaAlterRequest( - projectId: projectId, schemaDraft: updateDraft) - spgSchemaFacade.alterSchema(schemaAlterRequest) - - projectSchema = this.getProjectSchema() - MockSchemaResultValidator.checkUpdateResult(projectSchema.getSpgTypes()) - - // step 5 - SPGTypeRequest request = new SPGTypeRequest(name: MockSpgTypeNameEnum.DEFAULT_ALIPAY_USER.getName()) - ApiResponse response = spgSchemaFacade.querySPGType(request) - assertNotNull(response.getData()) - assertEquals(MockSpgTypeNameEnum.DEFAULT_ALIPAY_USER.getName(), response.getData().getName()) - - RelationRequest relationRequest = new RelationRequest( - sName: MockSpgTypeNameEnum.DEFAULT_ALIPAY_USER.getName(), - relation: "regAddress", - oName: MockSpgTypeNameEnum.DEFAULT_ADMINISTRATION.getName()) - ApiResponse relationResponse = spgSchemaFacade.queryRelation(relationRequest) - assertNotNull(relationResponse.getData()) - - // step 6 - projectSchema = this.getProjectSchema() - SchemaDraft deleteDraft = MockSchemaDraftFactory.buildDeleteDraft(projectSchema) - schemaAlterRequest = new SchemaAlterRequest( - projectId: projectId, schemaDraft: deleteDraft) - spgSchemaFacade.alterSchema(schemaAlterRequest) - - projectSchema = this.getProjectSchema() - MockSchemaResultValidator.checkInitResult(projectSchema.getSpgTypes()) - - then: - assertNotNull(this.getProjectSchema()) - } - - ProjectSchema getProjectSchema() { - ProjectSchemaRequest projectSchemaRequest = new ProjectSchemaRequest(projectId: projectId) - ApiResponse projectSchemaResponse = - spgSchemaFacade.queryProjectSchema(projectSchemaRequest) - - assertNotNull(projectSchemaResponse) - return projectSchemaResponse.getData() - } -} diff --git a/server/test/src/test/java/com/antgroup/openspg/test/scheduler/SchedulerServiceImplTest.java 
b/server/test/src/test/java/com/antgroup/openspg/test/scheduler/SchedulerServiceImplTest.java index eddb1693a..1716780b6 100644 --- a/server/test/src/test/java/com/antgroup/openspg/test/scheduler/SchedulerServiceImplTest.java +++ b/server/test/src/test/java/com/antgroup/openspg/test/scheduler/SchedulerServiceImplTest.java @@ -24,6 +24,9 @@ import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.LifeCycle; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.Status; import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum.TranslateType; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerInstanceQuery; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerJobQuery; +import com.antgroup.openspg.server.core.scheduler.model.query.SchedulerTaskQuery; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerTask; @@ -75,16 +78,16 @@ void submitOnceJob() { Long jobId = job.getId(); assertTrue(jobId > 0); - SchedulerJob jobQuery = new SchedulerJob(); + SchedulerJobQuery jobQuery = new SchedulerJobQuery(); jobQuery.setId(jobId); - SchedulerInstance instanceQuery = new SchedulerInstance(); + SchedulerInstanceQuery instanceQuery = new SchedulerInstanceQuery(); instanceQuery.setJobId(jobId); - SchedulerTask taskQuery = new SchedulerTask(); + SchedulerTaskQuery taskQuery = new SchedulerTaskQuery(); taskQuery.setJobId(jobId); try { // step 2: query Jobs - List jobs = schedulerService.searchJobs(jobQuery); + List jobs = schedulerService.searchJobs(jobQuery).getResults(); assertEquals(1, jobs.size()); // step 3: offline job @@ -117,7 +120,8 @@ void submitOnceJob() { ThreadUtils.sleep(100); // step 7: get Instance to set Finish - List instances = schedulerService.searchInstances(instanceQuery); + List instances = + 
schedulerService.searchInstances(instanceQuery).getResults(); assertTrue(instances.size() > 0); SchedulerInstance instance = notFinishInstances.get(0); SchedulerInstance ins = schedulerService.getInstanceById(instance.getId()); @@ -154,14 +158,14 @@ void submitOnceJob() { ThreadUtils.sleep(100); // step 11: get tasks - List tasks = schedulerService.searchTasks(taskQuery); + List tasks = schedulerService.searchTasks(taskQuery).getResults(); assertTrue(tasks.size() > 0); } finally { // step 12: delete Job assertTrue(schedulerService.deleteJob(jobId)); - assertEquals(0, schedulerService.searchJobs(jobQuery).size()); - assertEquals(0, schedulerService.searchInstances(instanceQuery).size()); - assertEquals(0, schedulerService.searchTasks(taskQuery).size()); + assertEquals(0, schedulerService.searchJobs(jobQuery).getResults().size()); + assertEquals(0, schedulerService.searchInstances(instanceQuery).getResults().size()); + assertEquals(0, schedulerService.searchTasks(taskQuery).getResults().size()); } } @@ -186,18 +190,19 @@ void submitPeriodJob() { assertTrue(jobId > 0); ThreadUtils.sleep(100); - SchedulerJob jobQuery = new SchedulerJob(); + SchedulerJobQuery jobQuery = new SchedulerJobQuery(); jobQuery.setId(jobId); - SchedulerInstance instanceQuery = new SchedulerInstance(); + SchedulerInstanceQuery instanceQuery = new SchedulerInstanceQuery(); instanceQuery.setJobId(jobId); - SchedulerTask taskQuery = new SchedulerTask(); + SchedulerTaskQuery taskQuery = new SchedulerTaskQuery(); taskQuery.setJobId(jobId); try { // step 2: query Jobs and Instances - List jobs = schedulerService.searchJobs(jobQuery); + List jobs = schedulerService.searchJobs(jobQuery).getResults(); assertEquals(1, jobs.size()); - List instances = schedulerService.searchInstances(instanceQuery); + List instances = + schedulerService.searchInstances(instanceQuery).getResults(); assertEquals(24, instances.size()); ThreadUtils.sleep(100); @@ -255,15 +260,15 @@ void submitPeriodJob() { 
assertEquals(InstanceStatus.FINISH, instance.getStatus()); // step 8: get tasks - List tasks = schedulerService.searchTasks(taskQuery); + List tasks = schedulerService.searchTasks(taskQuery).getResults(); assertTrue(tasks.size() > 0); } finally { // step 9: delete Job assertTrue(schedulerService.deleteJob(jobId)); - assertEquals(0, schedulerService.searchJobs(jobQuery).size()); - assertEquals(0, schedulerService.searchInstances(instanceQuery).size()); - assertEquals(0, schedulerService.searchTasks(taskQuery).size()); + assertEquals(0, schedulerService.searchJobs(jobQuery).getResults().size()); + assertEquals(0, schedulerService.searchInstances(instanceQuery).getResults().size()); + assertEquals(0, schedulerService.searchTasks(taskQuery).getResults().size()); } } @@ -286,18 +291,19 @@ void submitRealTimeJob() { assertTrue(jobId > 0); ThreadUtils.sleep(100); - SchedulerJob jobQuery = new SchedulerJob(); + SchedulerJobQuery jobQuery = new SchedulerJobQuery(); jobQuery.setId(jobId); - SchedulerInstance instanceQuery = new SchedulerInstance(); + SchedulerInstanceQuery instanceQuery = new SchedulerInstanceQuery(); instanceQuery.setJobId(jobId); - SchedulerTask taskQuery = new SchedulerTask(); + SchedulerTaskQuery taskQuery = new SchedulerTaskQuery(); taskQuery.setJobId(jobId); try { // step 2: query Jobs and Instances - List jobs = schedulerService.searchJobs(jobQuery); + List jobs = schedulerService.searchJobs(jobQuery).getResults(); assertEquals(1, jobs.size()); - List instances = schedulerService.searchInstances(instanceQuery); + List instances = + schedulerService.searchInstances(instanceQuery).getResults(); assertEquals(1, instances.size()); ThreadUtils.sleep(100); @@ -333,15 +339,15 @@ void submitRealTimeJob() { assertEquals(InstanceStatus.RUNNING, instance.getStatus()); // step 6: get tasks - List tasks = schedulerService.searchTasks(taskQuery); + List tasks = schedulerService.searchTasks(taskQuery).getResults(); assertTrue(tasks.size() > 0); } finally { // step 
7: delete Job assertTrue(schedulerService.deleteJob(jobId)); - assertEquals(0, schedulerService.searchJobs(jobQuery).size()); - assertEquals(0, schedulerService.searchInstances(instanceQuery).size()); - assertEquals(0, schedulerService.searchTasks(taskQuery).size()); + assertEquals(0, schedulerService.searchJobs(jobQuery).getResults().size()); + assertEquals(0, schedulerService.searchInstances(instanceQuery).getResults().size()); + assertEquals(0, schedulerService.searchTasks(taskQuery).getResults().size()); } } diff --git a/server/test/src/test/java/com/antgroup/openspg/test/scheduler/translate/LocalExampleTranslateMock.java b/server/test/src/test/java/com/antgroup/openspg/test/scheduler/translate/LocalExampleTranslateMock.java index ead50c99f..b5980697f 100644 --- a/server/test/src/test/java/com/antgroup/openspg/test/scheduler/translate/LocalExampleTranslateMock.java +++ b/server/test/src/test/java/com/antgroup/openspg/test/scheduler/translate/LocalExampleTranslateMock.java @@ -12,6 +12,8 @@ */ package com.antgroup.openspg.test.scheduler.translate; +import com.antgroup.openspg.server.common.model.scheduler.SchedulerEnum; +import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerInstance; import com.antgroup.openspg.server.core.scheduler.model.service.SchedulerJob; import com.antgroup.openspg.server.core.scheduler.model.task.TaskExecuteDag; import com.antgroup.openspg.server.core.scheduler.service.translate.Translate; @@ -19,7 +21,7 @@ import java.util.List; import org.springframework.stereotype.Component; -/** scheduler Translate Local implementation class. SchedulerJob to TaskDag */ +/** scheduler Translate Local implementation class. 
SchedulerJobDO to TaskDag */ @Component("localExampleTranslate") public class LocalExampleTranslateMock implements Translate { @@ -28,6 +30,10 @@ public TaskExecuteDag translate(SchedulerJob schedulerJob) { return getTaskDag(); } + @Override + public void statusCallback( + SchedulerJob job, SchedulerInstance instance, SchedulerEnum.InstanceStatus instanceStatus) {} + /** get Local Example TaskDag */ public TaskExecuteDag getTaskDag() { diff --git a/server/test/src/test/resources/config/application.properties b/server/test/src/test/resources/config/application.properties index 16c7be426..74b0e830f 100644 --- a/server/test/src/test/resources/config/application.properties +++ b/server/test/src/test/resources/config/application.properties @@ -49,8 +49,5 @@ cloudext.repository.impl.jdbc.driver=com.mysql.jdbc.Driver # Scheduler scheduler.handler.type=db scheduler.metadata.store.type=local -scheduler.execute.instances.period=5 -scheduler.execute.instances.unit=MINUTES -scheduler.generate.instances.period=1 -scheduler.generate.instances.unit=HOURS +scheduler.handler.process.period=300 scheduler.execute.max.day=10