diff --git a/release.notes b/release.notes
index ec99a5d946e..94561812ba6 100644
--- a/release.notes
+++ b/release.notes
@@ -193,6 +193,22 @@ NEW: (#4910) --runslow option on unit tests to allow faster local tests
 NEW: (#4938) added a helloworld test for the (yet to be implemented) cloud testing in certification
 CHANGE: (#4968) Change the defaults for tests (to MySQL 8 and ES 7)
 
+[v7r1p40]
+
+*Core
+CHANGE: (#5123) Read X509_CERT_DIR from central place
+
+*WorkloadManagement
+CHANGE: (#5143) Clarify that "tarfile failed with message" can be normal
+FIX: (#5146) SiteDirector._getPilotsWeMayWantToSubmit: fix return type in case of error
+
+*DataManagement
+CHANGE: (#5144) FileManagerPS has bulk method for getReplicas
+
+*Resources
+CHANGE: (#5137) HTCondorCE submits jobs with -spool option when a local schedd is used to
+        spool the pilot wrappers that can be deleted afterwards
+
 [v7r1p39]
 
 *WMS
diff --git a/src/DIRAC/DataManagementSystem/DB/FileCatalogComponents/FileManager/FileManagerPs.py b/src/DIRAC/DataManagementSystem/DB/FileCatalogComponents/FileManager/FileManagerPs.py
index d2b402a0444..1774b274902 100755
--- a/src/DIRAC/DataManagementSystem/DB/FileCatalogComponents/FileManager/FileManagerPs.py
+++ b/src/DIRAC/DataManagementSystem/DB/FileCatalogComponents/FileManager/FileManagerPs.py
@@ -778,51 +778,51 @@ def _setFileParameter(self, fileID, paramName, paramValue, connection=False):
   # _getFileReplicas related methods
   #
 
-  def _getFileReplicas(self, fileIDs, fields_input=['PFN'], allStatus=False, connection=False):
+  def _getFileReplicas(self, fileIDs, fields_input=None, allStatus=False, connection=False):
     """ Get replicas for the given list of files specified by their fileIDs
 
         :param fileIDs : list of file ids
-        :param fields_input : metadata of the Replicas we are interested in
+        :param fields_input : metadata of the Replicas we are interested in (default to PFN)
        :param allStatus : if True, all the Replica statuses will be considered,
                           otherwise, only the db.visibleReplicaStatus
 
        :returns S_OK with a dict { fileID : { SE name : dict of metadata } }
     """
 
-    connection = self._getConnection(connection)
+    if fields_input is None:
+      fields_input = ['PFN']
 
     fields = list(fields_input)
 
+    # always add Status in the list of required fields
     if 'Status' not in fields:
       fields.append('Status')
 
-    replicas = {}
+    # We initialize the dictionary with empty dict
+    # as default value, because this is what we want for
+    # non existing replicas
+    replicas = {fileID: {} for fileID in fileIDs}
 
     # Format the status to be used in a IN clause in the stored procedure
     fStatus = stringListToString(self.db.visibleReplicaStatus)
 
     fieldNames = ["FileID", "SE", "Status", "RepType", "CreationDate", "ModificationDate", "PFN"]
 
-    for fileID in fileIDs:
-
-      result = self.db.executeStoredProcedureWithCursor('ps_get_all_info_of_replicas', (fileID, allStatus, fStatus))
+    for chunks in breakListIntoChunks(fileIDs, 1000):
+      # Format the FileIDs to be used in a IN clause in the stored procedure
+      formatedFileIds = intListToString(chunks)
+      result = self.db.executeStoredProcedureWithCursor('ps_get_all_info_of_replicas_bulk',
+                                                        (formatedFileIds, allStatus, fStatus))
       if not result['OK']:
         return result
 
       rows = result['Value']
 
-      if not rows:
-        replicas[fileID] = {}
-
       for row in rows:
         rowDict = dict(zip(fieldNames, row))
-
-        # Returns only the required metadata
         se = rowDict["SE"]
-        repForFile = replicas.setdefault(fileID, {})
-        repForFile[se] = dict((key, rowDict.get(key, "Unknown metadata field")) for key in fields)
+        fileID = rowDict['FileID']
+        replicas[fileID][se] = dict((key, rowDict.get(key, "Unknown metadata field")) for key in fields)
 
     return S_OK(replicas)
diff --git a/src/DIRAC/DataManagementSystem/DB/FileCatalogWithFkAndPsDB.sql b/src/DIRAC/DataManagementSystem/DB/FileCatalogWithFkAndPsDB.sql
index d63b6684d46..6f1aefab50e 100755
--- a/src/DIRAC/DataManagementSystem/DB/FileCatalogWithFkAndPsDB.sql
+++ b/src/DIRAC/DataManagementSystem/DB/FileCatalogWithFkAndPsDB.sql
@@ -1322,6 +1322,7 @@ DELIMITER ;
 
 
 
+-- TO BE DEPRECATED IN FAVOR OF THE BULK METHOD
 -- ps_get_all_info_of_replicas : get the info of all replicas of a given file
 --
 -- file_id : id of the file
@@ -1353,7 +1354,7 @@ BEGIN
                       JOIN FC_StorageElements se on r.SEID = se.SEID
                       JOIN FC_Statuses st on r.Status = st.StatusID
                       WHERE FileID =',file_id,
-                      ' and st.Status in (',visibleReplicaStatus,') ');
+                      ' AND st.Status IN (', visibleReplicaStatus, ')');
 
     PREPARE stmt FROM @sql;
     EXECUTE stmt;
@@ -1364,6 +1365,52 @@ BEGIN
 END //
 DELIMITER ;
 
+-- ps_get_all_info_of_replicas_bulk : get the info of all replicas for a list of file ids
+--
+-- file_ids : list of file IDs
+-- allStatus : if False, consider visibleReplicaStatus
+-- visibleReplicaStatus : list of status we are interested in
+--
+-- output : FileID, se.SEName, st.Status, RepType, CreationDate, ModificationDate, PFN
+
+DROP PROCEDURE IF EXISTS ps_get_all_info_of_replicas_bulk;
+DELIMITER //
+CREATE PROCEDURE ps_get_all_info_of_replicas_bulk
+(IN file_ids TEXT, IN allStatus BOOLEAN, IN visibleReplicaStatus TEXT)
+BEGIN
+
+
+  IF allStatus THEN
+
+    SET @sql = CONCAT('SELECT SQL_NO_CACHE FileID, se.SEName, st.Status, RepType, CreationDate, ModificationDate, PFN
+                      FROM FC_Replicas r
+                      JOIN FC_StorageElements se on r.SEID = se.SEID
+                      JOIN FC_Statuses st on r.Status = st.StatusID
+                      WHERE FileID IN (', file_ids, ')');
+    PREPARE stmt FROM @sql;
+    EXECUTE stmt;
+    DEALLOCATE PREPARE stmt;
+
+  ELSE
+
+    SET @sql = CONCAT(
+        'SELECT SQL_NO_CACHE FileID, se.SEName, st.Status, RepType, CreationDate, ModificationDate, PFN
+        FROM FC_Replicas r
+        JOIN FC_StorageElements se on r.SEID = se.SEID
+        JOIN FC_Statuses st on r.Status = st.StatusID
+        WHERE FileID IN (', file_ids, ') ',
+        'AND st.Status IN (', visibleReplicaStatus, ')');
+
+    PREPARE stmt FROM @sql;
+    EXECUTE stmt;
+    DEALLOCATE PREPARE stmt;
+
+  END IF;
+
+END //
+DELIMITER ;
+
+
 -- ps_get_all_directory_info : get all the info of a given directory
 --
 -- dir_name : name of the directory
diff --git a/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py b/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py
index 6bf4b726294..baa9bca4672 100755
--- a/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py
+++ b/src/DIRAC/FrameworkSystem/scripts/dirac_proxy_init.py
@@ -27,6 +27,7 @@
 from DIRAC.Core.Utilities.DIRACScript import DIRACScript
 from DIRAC.FrameworkSystem.Client import ProxyGeneration, ProxyUpload
 from DIRAC.Core.Security import X509Chain, ProxyInfo, Properties, VOMS
+from DIRAC.Core.Security.Locations import getCAsLocation
 from DIRAC.ConfigurationSystem.Client.Helpers import Registry
 from DIRAC.FrameworkSystem.Client.BundleDeliveryClient import BundleDeliveryClient
 
@@ -190,10 +191,10 @@ def printInfo(self):
                                           self.__uploadedInfo[userDN][group].strftime("%Y/%m/%d %H:%M")))
 
   def checkCAs(self):
-    if "X509_CERT_DIR" not in os.environ:
-      gLogger.warn("X509_CERT_DIR is unset. Abort check of CAs")
+    caDir = getCAsLocation()
+    if not caDir:
+      gLogger.warn("No valid CA dir found.")
       return
-    caDir = os.environ["X509_CERT_DIR"]
     # In globus standards .r0 files are CRLs. They have the same names of the CAs but diffent file extension
     searchExp = os.path.join(caDir, "*.r0")
     crlList = glob.glob(searchExp)
diff --git a/src/DIRAC/WorkloadManagementSystem/Agent/SiteDirector.py b/src/DIRAC/WorkloadManagementSystem/Agent/SiteDirector.py
index 35f10f92945..3b28b523782 100644
--- a/src/DIRAC/WorkloadManagementSystem/Agent/SiteDirector.py
+++ b/src/DIRAC/WorkloadManagementSystem/Agent/SiteDirector.py
@@ -786,7 +786,7 @@ def _getPilotsWeMayWantToSubmit(self, ceDict):
     result = self.matcherClient.getMatchingTaskQueues(ceDict)
     if not result['OK']:
       self.log.error('Could not retrieve TaskQueues from TaskQueueDB', result['Message'])
-      return result
+      return 0, {}
     taskQueueDict = result['Value']
     if not taskQueueDict:
       self.log.verbose('No matching TQs found',
diff --git a/src/DIRAC/WorkloadManagementSystem/Utilities/PilotWrapper.py b/src/DIRAC/WorkloadManagementSystem/Utilities/PilotWrapper.py
index b456ad97923..a82aec0fbfb 100644
--- a/src/DIRAC/WorkloadManagementSystem/Utilities/PilotWrapper.py
+++ b/src/DIRAC/WorkloadManagementSystem/Utilities/PilotWrapper.py
@@ -202,21 +202,19 @@ def pilotWrapperScript(pilotFilesCompressedEncodedDict=None,
         pt.extractall()
         pt.close()
       except Exception as x:
-        print("tarfile failed with message %%s" %% repr(x), file=sys.stderr)
-        logger.error("tarfile failed with message %%s" %% repr(x))
-        logger.warning("Trying tar command (tar -xvf pilot.tar)")
+        print("tarfile failed with message (this is normal!) %%s" %% repr(x), file=sys.stderr)
+        logger.error("tarfile failed with message (this is normal!) %%s" %% repr(x))
+        logger.warn("Trying tar command (tar -xvf pilot.tar)")
         res = os.system("tar -xvf pilot.tar")
         if res:
-          logger.error("tar failed with exit code %%d, giving up" %% int(res))
-          print("tar failed with exit code %%d, giving up" %% int(res), file=sys.stderr)
+          logger.error("tar failed with exit code %%d, giving up (this is normal!)" %% int(res))
+          print("tar failed with exit code %%d, giving up (this is normal!)" %% int(res), file=sys.stderr)
           raise
 
       # if we get here we break out of the loop of locations
       break
-    except url_library_URLError as e:
-      print('%%s unreachable' %% loc, file=sys.stderr)
-      logger.error('%%s unreachable' %% loc)
-      logger.exception(e)
-    except Exception as e:
+    except (url_library_URLError, Exception) as e:
+      print('%%s unreacheable (this is normal!)' %% loc, file=sys.stderr)
+      logger.error('%%s unreacheable (this is normal!)' %% loc)
       logger.exception(e)
     else: