forked from ehanson8/dspace-editing
-
Notifications
You must be signed in to change notification settings - Fork 2
/
updateLanguageTagsForKey.py
98 lines (89 loc) · 3.69 KB
/
updateLanguageTagsForKey.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
import json
import requests
import secrets
import time
import csv
from datetime import datetime
import urllib3
import argparse
# Optionally switch to an alternate secrets module (production credentials).
# An empty answer, or a module that cannot be imported, falls back to the
# default stage config already imported at the top of the file.
secretsVersion = input('To edit production server, enter secrets filename: ')
try:
    if secretsVersion == '':
        raise ImportError
    secrets = __import__(secretsVersion)
    print('Editing Production')
except ImportError:
    print('Editing Stage')
# Resolve the metadata key to operate on: prefer the -k/--key CLI flag,
# otherwise prompt interactively.
parser = argparse.ArgumentParser()
parser.add_argument('-k', '--key', help='the key to be updated.')
args = parser.parse_args()
key = args.key if args.key else input('Enter the key to be updated: ')
# Suppress TLS certificate warnings (the DSpace host apparently uses a
# cert requests cannot verify -- TODO confirm this is intentional).
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Connection settings come from the selected secrets module.
baseURL = secrets.baseURL
email = secrets.email
password = secrets.password
filePath = secrets.filePath
skippedCollections = secrets.skippedCollections

startTime = time.time()

# Authenticate against the DSpace REST API and keep the session cookie
# for all subsequent calls.
# NOTE(review): credentials are passed as query params here; they may end
# up in server access logs -- consider sending them in the request body.
data = {'email': email, 'password': password}
header = {'content-type': 'application/json', 'accept': 'application/json'}
session = requests.post(baseURL+'/rest/login', headers=header, params=data).cookies['JSESSIONID']
cookies = {'JSESSIONID': session}
headerFileUpload = {'accept': 'application/json'}
cookiesFileUpload = cookies
status = requests.get(baseURL+'/rest/status', headers=header, cookies=cookies).json()
print('authenticated')

# CSV log of edited items. newline='' is required by the csv module for
# files it writes; without it blank rows appear on Windows.
f = csv.writer(open(filePath+'languageTagUpdate'+key+datetime.now().strftime('%Y-%m-%d %H.%M.%S')+'.csv', 'w', newline=''))
f.writerow(['itemID']+['key'])
# Page through /rest/filtered-items, collecting the link of every item
# that has at least one value for the target key. All links are gathered
# up front so the edits below cannot disturb server-side pagination.
offset = 0
recordsEdited = 0
itemLinks = []
items = None
while items != []:
    endpoint = baseURL+'/rest/filtered-items?query_field[]='+key+'&query_op[]=exists&query_val[]=&limit=200&offset='+str(offset)
    print(endpoint)
    response = requests.get(endpoint, headers=header, cookies=cookies).json()
    items = response['items']
    for item in items:
        itemLinks.append(item['link'])
    offset = offset + 200
    print(offset)
# For each collected item: fetch its metadata, retag every value of the
# target key whose language is null to 'en_US', append a provenance note,
# then replace the item's metadata wholesale (DELETE + PUT) and log it.
for itemLink in itemLinks:
    print(itemLink)
    metadata = requests.get(baseURL + itemLink + '/metadata', headers=header, cookies=cookies).json()
    itemMetadataProcessed = []
    for element in metadata:
        # Drop fields that are not sent back on the write.
        # NOTE(review): presumably these are derived/read-only on the REST
        # side -- confirm against the DSpace REST metadata contract.
        element.pop('schema', None)
        element.pop('element', None)
        element.pop('qualifier', None)
        if element['key'] == key and element['language'] is None:
            # Retag the value with en_US.
            itemMetadataProcessed.append({
                'key': element['key'],
                'value': element['value'],
                'language': 'en_US',
            })
            # Record the change as a provenance note on the item.
            provNote = 'The language tag for \''+element['key']+': '+element['value']+'\' was changed from \'null\' to \'en_US\' through a batch process on '+datetime.now().strftime('%Y-%m-%d %H:%M:%S')+'.'
            itemMetadataProcessed.append({
                'key': 'dc.description.provenance',
                'value': provNote,
                'language': 'en_US',
            })
        else:
            # Unaffected elements are written back unchanged.
            itemMetadataProcessed.append(element)
    itemMetadataProcessed = json.dumps(itemMetadataProcessed)
    # Clear the item's metadata, then PUT the edited set in one shot.
    delete = requests.delete(baseURL + itemLink + '/metadata', headers=header, cookies=cookies)
    print(delete)
    post = requests.put(baseURL + itemLink + '/metadata', headers=header, cookies=cookies, data=itemMetadataProcessed)
    print(post)
    f.writerow([itemLink]+[key])
# End the REST session and report the total wall-clock runtime as H:MM:SS.
logout = requests.post(baseURL+'/rest/logout', headers=header, cookies=cookies)
elapsedTime = time.time() - startTime
minutes, seconds = divmod(elapsedTime, 60)
hours, minutes = divmod(minutes, 60)
print('Total script run time: ', '%d:%02d:%02d' % (hours, minutes, seconds))