Commit 9e786b1

updates
1 parent a3e0b56 commit 9e786b1

2 files changed: 47 additions & 10 deletions

README.md

Lines changed: 1 addition & 1 deletion
@@ -74,7 +74,7 @@ Based on user input, removes all key-value pairs with the specified key and value
 Based on a specified CSV file of DSpace item handles and replacement file names, replaces the name of bitstreams attached to the specified items.
 
 #### [generateCollectionLevelAbstract.py](generateCollectionLevelAbstract.py)
-Based on user input, creates an HTML collection-level abstract that contains hyperlinks to all of the items in each series, as found in the metadata CSV. This assumes that the series title is recorded in 'dc.relation.ispartof' or a similar property in the DSpace item records.
+Based on user input, creates an HTML collection-level abstract that contains hyperlinks to all of the items in each series, as found in the metadata CSV. This assumes that the series title is recorded in 'dc.relation.ispartof' or a similar property in the DSpace item records. The abstract is then posted to the collection in DSpace.
 
 #### [overwriteExistingMetadata.py](overwriteExistingMetadata.py)
 Based on a specified CSV file of DSpace item handles and file identifiers, replaces the metadata of the items with specified handles with the set of metadata elements associated with the corresponding file identifier in a JSON file of metadata entries named 'metadataOverwrite.json.'
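The line added above for generateCollectionLevelAbstract.py reflects the main change in this commit: instead of only writing the abstract to a local text file, the script now assembles the collection's 'introductoryText' and pushes it to DSpace over the REST API (see the diff to generateCollectionLevelAbstract.py below). As a rough, illustrative sketch of the markup built for each series, where the base URL, collection handle, and series title are hypothetical placeholders:

# Illustrative sketch only; mirrors the string concatenation in the diff below.
baseURL = 'https://dspace.example.edu'             # hypothetical server
handleEdited = '123456789/10'.replace('/', '%2F')  # hypothetical collection handle, URL-encoded
seriesTitle = 'Board of Trustees records'          # hypothetical series title from the metadata CSV
editedSeriesTitle = seriesTitle.replace(' ', '+')
seriesLink = '<li><a href="'+baseURL+'/discover?scope='+handleEdited+'&query=%22'+editedSeriesTitle+'%22&sort_by=dc.title_sort&order=asc&submit=">'+seriesTitle+'</a></li>'
# Each such <li> is appended to seriesLinks; the final introductory text is
# '<p>'+abstractText+'</p>' followed by '<ul>'+seriesLinks+'</ul>'.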

generateCollectionLevelAbstract.py

Lines changed: 46 additions & 9 deletions
@@ -1,26 +1,56 @@
+import json
+import requests
+import secrets
 import csv
 import argparse
+import urllib3
 
 parser = argparse.ArgumentParser()
 parser.add_argument('-f', '--fileNameCSV', help='the metadata CSV file. optional - if not provided, the script will ask for input')
-parser.add_argument('-b', '--baseURL', help='the base URL to use for the series links. optional - if not provided, the script will ask for input')
 parser.add_argument('-i', '--handle', help='handle of the collection. optional - if not provided, the script will ask for input')
 args = parser.parse_args()
 
 if args.fileNameCSV:
     fileNameCSV = args.fileNameCSV
 else:
     fileNameCSV = raw_input('Enter the metadata CSV file (including \'.csv\'): ')
-if args.baseURL:
-    baseURL = args.baseURL
-else:
-    baseURL = raw_input('Enter the base URL to use for the series links: ')
 if args.handle:
     handle = args.handle
 else:
     handle = raw_input('Enter collection handle: ')
 
-handle = handle.replace('/', '%2F')
+urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+secretsVersion = raw_input('To edit production server, enter the name of the secrets file: ')
+if secretsVersion != '':
+    try:
+        secrets = __import__(secretsVersion)
+        print 'Editing Production'
+    except ImportError:
+        print 'Editing Stage'
+else:
+    print 'Editing Stage'
+
+baseURL = secrets.baseURL
+email = secrets.email
+password = secrets.password
+filePath = secrets.filePath
+verify = secrets.verify
+
+data = {'email':email,'password':password}
+header = {'content-type':'application/json','accept':'application/json'}
+session = requests.post(baseURL+'/rest/login', headers=header, verify=verify, params=data).cookies['JSESSIONID']
+cookies = {'JSESSIONID': session}
+headerFileUpload = {'accept':'application/json'}
+cookiesFileUpload = cookies
+status = requests.get(baseURL+'/rest/status', headers=header, cookies=cookies, verify=verify).json()
+userFullName = status['fullname']
+print 'authenticated'
+
+endpoint = baseURL+'/rest/handle/'+handle
+collection = requests.get(endpoint, headers=header, cookies=cookies, verify=verify).json()
+collectionID = collection['uuid']
+print collection
 
 #Enter abstract text here
 abstractText = ''
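This first hunk drops the --baseURL argument and instead reads connection details from a secrets module: the default `secrets` import targets the stage server, and entering a file name at the prompt swaps in a production secrets module via __import__. The script then logs in at /rest/login, confirms the session via /rest/status, and fetches the collection record by handle to obtain its UUID. A minimal sketch of what such a secrets module is assumed to contain, with placeholder values (the real secrets files are not part of this commit):

# secrets.py (stage) or e.g. secretsProd.py (production) -- placeholder values only
baseURL = 'https://dspace-stage.example.edu'  # DSpace base URL, no trailing slash
email = 'dspace-admin@example.edu'            # REST API account used for /rest/login
password = 'changeme'
filePath = '/path/to/working/files/'          # shared across the repo's scripts; not referenced in the hunks shown
verify = False                                # passed to requests as the SSL verify flag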
@@ -37,12 +67,19 @@
 seriesLinks = ''
 
 for seriesTitle in seriesTitles:
+    handleEdited = handle.replace('/', '%2F')
     editedSeriesTitle = seriesTitle.replace(' ','+')
-    seriesLink = '<li><a href="'+baseURL+'discover?scope='+handle+'&query=%22'+editedSeriesTitle+'%22&sort_by=dc.title_sort&order=asc&submit=">'+seriesTitle+'</a></li>'
+    seriesLink = '<li><a href="'+baseURL+'/discover?scope='+handleEdited+'&query=%22'+editedSeriesTitle+'%22&sort_by=dc.title_sort&order=asc&submit=">'+seriesTitle+'</a></li>'
    seriesLinks += seriesLink
 
 abstractText = '<p>'+abstractText+'</p>'
 seriesLinks = '<ul>'+seriesLinks+'</ul>'
+introductoryText = abstractText + seriesLinks
+
+collection['introductoryText'] = introductoryText
+collection = json.dumps(collection)
+print collection
+post = requests.put(baseURL+'/rest/collections/'+collectionID, headers=header, cookies=cookies, verify=verify, data=collection)
+print post
 
-f = open('collectionLevelAbstract.txt', 'wb')
-f.write(abstractText + seriesLinks)
+logout = requests.post(baseURL+'/rest/logout', headers=header, cookies=cookies, verify=verify)
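The second hunk swaps the local file write for a PUT of the updated collection JSON to /rest/collections/{uuid}, followed by a logout. With the --baseURL flag gone, a typical invocation would now look something like the line below (the CSV name and handle are hypothetical; the server is chosen at the secrets prompt):

python generateCollectionLevelAbstract.py -f collectionMetadata.csv -i 123456789/10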
