|
| 1 | +import json |
| 2 | +import requests |
| 3 | +import secrets |
1 | 4 | import csv |
2 | 5 | import argparse |
| 6 | +import urllib3 |
3 | 7 |
|
# Command-line options; any value not supplied as a flag is collected
# interactively instead.
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--fileNameCSV', help='the metadata CSV file. optional - if not provided, the script will ask for input')
parser.add_argument('-i', '--handle', help='handle of the collection. optional - if not provided, the script will ask for input')
args = parser.parse_args()

# Fall back to a prompt whenever a flag was omitted (or empty).
fileNameCSV = args.fileNameCSV or raw_input('Enter the metadata CSV file (including \'.csv\'): ')
handle = args.handle or raw_input('Enter collection handle: ')
22 | 21 |
|
23 | | -handle = handle.replace('/', '%2F') |
| 22 | +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) |
| 23 | + |
| 24 | +secretsVersion = raw_input('To edit production server, enter the name of the secrets file: ') |
| 25 | +if secretsVersion != '': |
| 26 | + try: |
| 27 | + secrets = __import__(secretsVersion) |
| 28 | + print 'Editing Production' |
| 29 | + except ImportError: |
| 30 | + print 'Editing Stage' |
| 31 | +else: |
| 32 | + print 'Editing Stage' |
| 33 | + |
| 34 | +baseURL = secrets.baseURL |
| 35 | +email = secrets.email |
| 36 | +password = secrets.password |
| 37 | +filePath = secrets.filePath |
| 38 | +verify = secrets.verify |
| 39 | + |
| 40 | +data = {'email':email,'password':password} |
| 41 | +header = {'content-type':'application/json','accept':'application/json'} |
| 42 | +session = requests.post(baseURL+'/rest/login', headers=header, verify=verify, params=data).cookies['JSESSIONID'] |
| 43 | +cookies = {'JSESSIONID': session} |
| 44 | +headerFileUpload = {'accept':'application/json'} |
| 45 | +cookiesFileUpload = cookies |
| 46 | +status = requests.get(baseURL+'/rest/status', headers=header, cookies=cookies, verify=verify).json() |
| 47 | +userFullName = status['fullname'] |
| 48 | +print 'authenticated' |
| 49 | + |
| 50 | +endpoint = baseURL+'/rest/handle/'+handle |
| 51 | +collection = requests.get(endpoint, headers=header, cookies=cookies, verify=verify).json() |
| 52 | +collectionID = collection['uuid'] |
| 53 | +print collection |
24 | 54 |
|
# Abstract text for the collection home page; fill in before running.
abstractText = ''
|
seriesLinks = ''

# Build one Discovery search link per series title.
# The handle's URL-encoding is loop-invariant, so compute it once here
# instead of on every iteration as before.
handleEdited = handle.replace('/', '%2F')
for seriesTitle in seriesTitles:
    # Spaces become '+' so the title survives as a query-string value.
    editedSeriesTitle = seriesTitle.replace(' ', '+')
    seriesLink = '<li><a href="'+baseURL+'/discover?scope='+handleEdited+'&query=%22'+editedSeriesTitle+'%22&sort_by=dc.title_sort&order=asc&submit=">'+seriesTitle+'</a></li>'
    seriesLinks += seriesLink
43 | 74 |
|
44 | 75 | abstractText = '<p>'+abstractText+'</p>' |
45 | 76 | seriesLinks = '<ul>'+seriesLinks+'</ul>' |
| 77 | +introductoryText = abstractText + seriesLinks |
| 78 | + |
| 79 | +collection['introductoryText'] = introductoryText |
| 80 | +collection = json.dumps(collection) |
| 81 | +print collection |
| 82 | +post = requests.put(baseURL+'/rest/collections/'+collectionID, headers=header, cookies=cookies, verify=verify, data=collection) |
| 83 | +print post |
46 | 84 |
|
# Close the REST session.
logout = requests.post(baseURL + '/rest/logout', headers=header, cookies=cookies, verify=verify)
0 commit comments