|
3 | 3 | import secrets |
4 | 4 | import csv |
5 | 5 | import time |
| 6 | +import urllib3 |
| 7 | + |
| 8 | +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) |
6 | 9 |
|
7 | 10 | secretsVersion = raw_input('To edit production server, enter the name of the secrets file: ') |
8 | 11 | if secretsVersion != '': |
|
13 | 16 |         print 'Editing Stage' |
14 | 17 | else: |
15 | 18 |     print 'Editing Stage' |
16 | | - |
| 19 | + |
17 | 20 | baseURL = secrets.baseURL |
18 | 21 | email = secrets.email |
19 | 22 | password = secrets.password |
20 | 23 | filePath = secrets.filePath |
21 | 24 | verify = secrets.verify |
22 | 25 |
|
23 | | -requests.packages.urllib3.disable_warnings() |
24 | | - |
25 | | -communityID = raw_input('Enter community ID: ') |
| 26 | +communityHandle = raw_input('Enter community handle: ') |
26 | 27 | key = raw_input('Enter first key: ') |
27 | 28 | key2 = raw_input('Enter second key: ') |
28 | 29 |
|
29 | 30 | startTime = time.time() |
30 | | -data = json.dumps({'email':email,'password':password}) |
| 31 | +data = {'email':email,'password':password} |
31 | 32 | header = {'content-type':'application/json','accept':'application/json'} |
32 | | -session = requests.post(baseURL+'/rest/login', headers=header, verify=verify, data=data).content |
33 | | -headerAuth = {'content-type':'application/json','accept':'application/json', 'rest-dspace-token':session} |
| 33 | +session = requests.post(baseURL+'/rest/login', headers=header, verify=verify, params=data).cookies['JSESSIONID'] |
| 34 | +cookies = {'JSESSIONID': session} |
| 35 | +headerFileUpload = {'accept':'application/json'} |
| 36 | +cookiesFileUpload = cookies |
| 37 | +status = requests.get(baseURL+'/rest/status', headers=header, cookies=cookies, verify=verify).json() |
34 | 38 | print 'authenticated' |
35 | 39 |
|
| 40 | +endpoint = baseURL+'/rest/handle/'+communityHandle |
| 41 | +community = requests.get(endpoint, headers=header, cookies=cookies, verify=verify).json() |
| 42 | +communityID = community['uuid'] |
| 43 | + |
36 | 44 | itemList = [] |
37 | 45 | endpoint = baseURL+'/rest/communities' |
38 | | -collections = requests.get(baseURL+'/rest/communities/'+str(communityID)+'/collections', headers=headerAuth, verify=verify).json() |
| 46 | +collections = requests.get(baseURL+'/rest/communities/'+str(communityID)+'/collections', headers=header, cookies=cookies, verify=verify).json() |
39 | 47 | for j in range (0, len (collections)): |
40 | | -    collectionID = collections[j]['id'] |
41 | | -    if collectionID != 24: |
| 48 | +    collectionID = collections[j]['uuid'] |
| 49 | +    print collectionID |
| 50 | +    if collectionID != '4dccec82-4cfb-4583-a728-2cb823b15ef0': |
42 | 51 |         offset = 0 |
43 | 52 |         items = '' |
44 | 53 |         while items != []: |
45 | | -            items = requests.get(baseURL+'/rest/collections/'+str(collectionID)+'/items?limit=1000&offset='+str(offset), headers=headerAuth, verify=verify) |
| 54 | +            items = requests.get(baseURL+'/rest/collections/'+str(collectionID)+'/items?limit=200&offset='+str(offset), headers=header, cookies=cookies, verify=verify) |
46 | 55 |             while items.status_code != 200: |
47 | 56 |                 time.sleep(5) |
48 | | -                items = requests.get(baseURL+'/rest/collections/'+str(collectionID)+'/items?limit=1000&offset='+str(offset), headers=headerAuth, verify=verify) |
| 57 | +                items = requests.get(baseURL+'/rest/collections/'+str(collectionID)+'/items?limit=200&offset='+str(offset), headers=header, cookies=cookies, verify=verify) |
49 | 58 |             items = items.json() |
50 | 59 |             for k in range (0, len (items)): |
51 | | -                itemID = items[k]['id'] |
| 60 | +                itemID = items[k]['uuid'] |
52 | 61 |                 itemList.append(itemID) |
53 | | -            offset = offset + 1000 |
| 62 | +            offset = offset + 200 |
54 | 63 | elapsedTime = time.time() - startTime |
55 | 64 | m, s = divmod(elapsedTime, 60) |
56 | 65 | h, m = divmod(m, 60) |
|
60 | 69 | for number, itemID in enumerate(itemList): |
61 | 70 |     itemsRemaining = len(itemList) - number |
62 | 71 |     print 'Items remaining: ', itemsRemaining, 'ItemID: ', itemID |
63 | | -    metadata = requests.get(baseURL+'/rest/items/'+str(itemID)+'/metadata', headers=headerAuth, verify=verify).json() |
| 72 | +    metadata = requests.get(baseURL+'/rest/items/'+str(itemID)+'/metadata', headers=header, cookies=cookies, verify=verify).json() |
64 | 73 |     itemTuple = (itemID,) |
65 | 74 |     tupleValue1 = '' |
66 | 75 |     tupleValue2 = '' |
67 | 76 |     for l in range (0, len (metadata)): |
68 | 77 |         if metadata[l]['key'] == key: |
69 | | -            metadataValue = metadata[l]['value'] |
| 78 | +            metadataValue = metadata[l]['value'].encode('utf-8') |
70 | 79 |             tupleValue1 = metadataValue |
71 | 80 |         if metadata[l]['key'] == key2: |
72 | | -            metadataValue = metadata[l]['value'] |
| 81 | +            metadataValue = metadata[l]['value'].encode('utf-8') |
73 | 82 |             tupleValue2 = metadataValue |
74 | 83 |     itemTuple = itemTuple + (tupleValue1 , tupleValue2) |
75 | 84 |     valueList.append(itemTuple) |
|
86 | 95 | for i in range (0, len (valueList)): |
87 | 96 |     f.writerow([valueList[i][0]]+[valueList[i][1]]+[valueList[i][2]]) |
88 | 97 |
|
89 | | -logout = requests.post(baseURL+'/rest/logout', headers=headerAuth, verify=verify) |
| 98 | +logout = requests.post(baseURL+'/rest/logout', headers=header, cookies=cookies, verify=verify) |
90 | 99 |
|
91 | 100 | elapsedTime = time.time() - startTime |
92 | 101 | m, s = divmod(elapsedTime, 60) |
|