|
1 | 1 | from pyDataverse.api import NativeApi, DataAccessApi
|
2 | 2 | from pyDataverse.models import Datafile
|
3 |
| -from os.path import isdir, isfile, join |
| 3 | +from os.path import isdir, join |
| 4 | +from time import sleep |
4 | 5 | from os import walk
|
| 6 | +import argparse |
5 | 7 | import requests
|
6 |
| -import hashlib |
7 | 8 | import sys
|
8 | 9 |
|
9 |
| -dataverse_token = sys.argv[1] |
10 |
| -dataverse_server = sys.argv[2].strip("/") |
11 |
| -dataverse_dataset_doi = sys.argv[3] |
12 |
| -github_repository = sys.argv[4] |
def parse_arguments(argv=None):
    """Parse the command-line arguments of the upload action.

    Args:
        argv: Optional list of argument strings. When ``None`` (the
            default) argparse falls back to ``sys.argv[1:]``, which
            preserves the original CLI behavior while letting tests
            pass an explicit argument vector.

    Returns:
        argparse.Namespace with attributes ``token``, ``server``,
        ``doi``, ``repo`` and ``dir`` (``None`` unless ``-d/--dir``
        was given).
    """
    parser = argparse.ArgumentParser(
        description="Upload a repository checkout to a Dataverse "
                    "dataset, replacing the dataset's current files.")

    # Mandatory positional arguments
    parser.add_argument("token", help="Dataverse token.")
    parser.add_argument("server", help="Dataverse server.")
    parser.add_argument("doi", help="Dataset DOI.")
    parser.add_argument("repo", help="GitHub repository.")

    # Optional arguments
    parser.add_argument("-d", "--dir", help="Uploads only a specific dir.")

    return parser.parse_args(argv)
| 24 | + |
if __name__ == '__main__':
    args = parse_arguments()
    # Normalize the base URL: strip('/') removes slashes from BOTH ends,
    # so a trailing '/' on the server argument does not break URL joins.
    dataverse_server = args.server.strip("/")
    api = NativeApi(dataverse_server , args.token)
    # NOTE(review): data_api is constructed but never used below — confirm
    # whether it can be dropped or is kept for future download support.
    data_api = DataAccessApi(dataverse_server)

    # the following deletes all the files in the dataset

    dataset = api.get_dataset(args.doi)
    files_list = dataset.json()['data']['latestVersion']['files']

    # SWORD v2 edit-media endpoint: appending a numeric file id and
    # issuing DELETE removes that file from the draft dataset version.
    delete_api = dataverse_server + \
        '/dvn/api/data-deposit/v1.1/swordv2/edit-media/file/'
    for f in files_list:
        fileid = f["dataFile"]["id"]
        # NOTE(review): resp.status_code is never checked — a failed
        # delete is silently ignored; consider logging non-2xx replies.
        resp = requests.delete(
            delete_api + str(fileid), \
            auth = (args.token , ""))

    # the following adds all files from the repository to Dataverse

    # Walk either the whole checkout ('repo') or only the requested
    # subdirectory when -d/--dir was given.
    path = join('repo',args.dir) if args.dir else 'repo'
    for root, subdirs, files in walk(path):
        # Pruning subdirs in place keeps walk() from descending into
        # git metadata directories.
        if '.git' in subdirs:
            subdirs.remove('.git')
        if '.github' in subdirs:
            subdirs.remove('.github')
        for f in files:
            df = Datafile()
            df.set({
                "pid" : args.doi,
                "filename" : f,
                # root[5:] strips the leading 'repo/' (or yields '' at the
                # top level) so directoryLabel mirrors the repo layout.
                # Assumes the walk root is exactly 'repo' — TODO confirm.
                "directoryLabel": root[5:],
                "description" : \
                    "Uploaded with GitHub Action from {}.".format(
                        args.repo),
            })
            # NOTE(review): upload response is not checked; failures are
            # silent best-effort by design here.
            resp = api.upload_datafile(
                args.doi, join(root,f), df.json())
            sleep(0.05) # give some time to upload

    # publish updated dataset

    # Major release bumps the dataset version (e.g. 1.0 -> 2.0) and makes
    # the replaced file set public.
    resp = api.publish_dataset(args.doi, release_type="major")
18 | 69 |
|
19 |
| -dataset = api.get_dataset(dataverse_dataset_doi) |
20 |
| -files_list = dataset.json()['data']['latestVersion']['files'] |
21 |
| - |
22 |
| -delete_api = dataverse_server + \ |
23 |
| - '/dvn/api/data-deposit/v1.1/swordv2/edit-media/file/' |
24 |
| -for f in files_list: |
25 |
| - fileid = f["dataFile"]["id"] |
26 |
| - resp = requests.delete( |
27 |
| - delete_api + str(fileid), \ |
28 |
| - auth = (dataverse_token , "")) |
29 |
| - |
30 |
| -# the following adds all files from the repository to Dataverse |
31 |
| - |
32 |
| -for root, subdirs, files in walk('repo'): |
33 |
| - if '.git' in subdirs: |
34 |
| - subdirs.remove('.git') |
35 |
| - if '.github' in subdirs: |
36 |
| - subdirs.remove('.github') |
37 |
| - for f in files: |
38 |
| - df = Datafile() |
39 |
| - df.set({ |
40 |
| - "pid" : dataverse_dataset_doi, |
41 |
| - "filename" : f, |
42 |
| - "directoryLabel": root[5:], |
43 |
| - "description" : \ |
44 |
| - "Uploaded with GitHub Action from {}.".format( |
45 |
| - github_repository), |
46 |
| - }) |
47 |
| - resp = api.upload_datafile( |
48 |
| - dataverse_dataset_doi, join(root,f), df.json()) |
49 |
| - |
50 |
| -# publish updated dataset |
51 |
| - |
52 |
| -resp = api.publish_dataset(dataverse_dataset_doi, release_type="major") |
|
0 commit comments