From 3db44862192196891a4d178d0ebce923bb8454ad Mon Sep 17 00:00:00 2001 From: Michael Heppler Date: Thu, 31 May 2018 16:02:59 -0400 Subject: [PATCH 001/701] Added new UI prototype to Upload Files pg. Code comments include some to-do's to tackle. [ref #4610] --- src/main/java/Bundle.properties | 5 +- .../iq/dataverse/EditDatafilesPage.java | 3 +- src/main/webapp/editFilesFragment.xhtml | 158 ++++++++++++++---- src/main/webapp/editdatafiles.xhtml | 31 +++- 4 files changed, 157 insertions(+), 40 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index fe87288740a..f4ee44d7f9c 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1226,7 +1226,8 @@ dataset.keywordDisplay.title=Keyword dataset.subjectDisplay.title=Subject dataset.contact.tip=Use email button above to contact. dataset.asterisk.tip=Asterisks indicate required fields -dataset.message.uploadFiles=Upload Dataset Files - You can drag and drop files from your desktop, directly into the upload widget. +dataset.message.uploadFiles.label=Upload Dataset Files +dataset.message.uploadFiles.message=For more information about supported file formats, please refer to the User Guide. dataset.message.editMetadata=Edit Dataset Metadata - Add more metadata about this dataset to help others easily find it. dataset.message.editTerms=Edit Dataset Terms - Update this dataset's terms of use. dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock. @@ -1302,7 +1303,7 @@ file.count={0} {0, choice, 0#Files|1#File|2#Files} file.count.selected={0} {0, choice, 0#Files Selected|1#File Selected|2#Files Selected} file.selectToAddBtn=Select Files to Add file.selectToAdd.tipLimit=File upload limit is {0} bytes per file. -file.selectToAdd.tipMoreInformation=For more information about supported file formats, please refer to the User Guide. +file.selectToAdd.tipMoreInformation=Select or drag and drop files from your desktop, directly into the upload widget. file.selectToAdd.dragdropMsg=Drag and drop files here. file.createUploadDisabled=Once you have saved your dataset, you can upload your data using the "Upload Files" button on the dataset page. For more information about supported file formats, please refer to the User Guide. 
file.fromDropbox=Upload from Dropbox diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index f3f7a93978c..36f9090eba0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -20,6 +20,7 @@ import edu.harvard.iq.dataverse.ingest.IngestUtil; import edu.harvard.iq.dataverse.search.FileView; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.JsfHelper; import static edu.harvard.iq.dataverse.util.JsfHelper.JH; @@ -551,7 +552,7 @@ public String init() { saveEnabled = true; if (mode == FileEditMode.UPLOAD) { - JH.addMessage(FacesMessage.SEVERITY_INFO, getBundleString("dataset.message.uploadFiles")); + JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.message.uploadFiles.label"), BundleUtil.getStringFromBundle("dataset.message.uploadFiles.message", Arrays.asList(systemConfig.getGuidesBaseUrl(), systemConfig.getGuidesVersion()))); } if (settingsService.isTrueForKey(SettingsServiceBean.Key.PublicInstall, false)){ diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index cb651d8c5fb..621dcb4d643 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -20,8 +20,46 @@
+ - +
+ + + +
+ + + + + +
+ + +
+ +

Help message providing value and info about HTTP upload to the user.

+ +

+ + + + + + + + +

+ -

- - - - - - - - - - - - -

+ + @@ -198,29 +224,100 @@ invalidSizeMessage="#{bundle['file.edit.error.file_exceeds_limit']}" sequential="true" widgetVar="fileUploadWidget"/> - -
-
-
+
+
+ + +
+ + + + + +
+
+ +

Help message providing value and info about Dropbox to the user.

+ + +
+
+
+ +
+
-
+ +
+ + + + + +
+
+

Help message providing value and info about rsync to the user.

+

Warning message explaining that this option is not available because files have already been uploaded to this dataset.

+
+
+
+ +
+ + +
+ + + + + +
+ +
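The editFilesFragment hunk above adds one panel per upload method, each wired to the new bundle strings, and the EditDatafilesPage change earlier in this patch builds the page banner from the split label/message pair, passing the guides base URL and version as message arguments. A minimal, self-contained sketch of how such MessageFormat-style bundle placeholders resolve; the template wording and URL layout are illustrative assumptions, since the plain-text rendering of this patch has stripped the markup that carries the {0}/{1} placeholders:

import java.text.MessageFormat;
import java.util.Arrays;
import java.util.List;

public class BundleMessageSketch {
    public static void main(String[] args) {
        // Hypothetical template; the real bundle entry presumably embeds
        // {0} (guides base URL) and {1} (guides version) in its link markup.
        String template = "For more information about supported file formats, "
                + "please refer to the User Guide at {0}/en/{1}/user/.";
        List<String> params = Arrays.asList("https://guides.dataverse.org", "4.9");
        // BundleUtil.getStringFromBundle(key, params), shown in the Java hunk
        // above, ultimately formats the looked-up template the same way.
        System.out.println(MessageFormat.format(template, params.toArray()));
    }
}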
diff --git a/src/main/webapp/editdatafiles.xhtml b/src/main/webapp/editdatafiles.xhtml index 61ce0ed5c1e..3a3c14b4a7b 100755 --- a/src/main/webapp/editdatafiles.xhtml +++ b/src/main/webapp/editdatafiles.xhtml @@ -100,15 +100,28 @@
-
- - - -
- - + + + - + - +
-
+
- -
- - - - - -
-
-

- Help message providing value and info about rsync to the user. +

+ + +
+
+

- Help message providing value and info about rsync to the user. +

+

- Warning message explaining that this option is not available because files have already been uploaded to this dataset. +

+
+
+ + + +

+ +

-

- Warning message explaining that this option is not available because files have already been uploaded to this dataset. + +

+ +

-
+ + +

+ + + + + +

+
+
+ +
+
-
+ + + + + + + +
+ +
+
+

#{bundle['file.createUploadDisabled']}

+
+
+ +
+
+
+ +
@@ -547,8 +612,8 @@

#{bundle['file.deleteFileDialog.failed.tip']}

- +
@@ -891,6 +956,10 @@ $('button[id$="compareVersions"]').trigger('click'); } } + function deleteFinished() { + $('button[id$="updateEditDataFilesButtonsForDelete"]').trigger('click'); + $('button[id$="allDeletesFinished"]').trigger('click'); + } function checkFilesSelected() { var count = PF('filesTable').getSelectedRowsCount(); if (count > 0) { diff --git a/src/main/webapp/editdatafiles.xhtml b/src/main/webapp/editdatafiles.xhtml index 7911080cdf3..6238ca5a091 100755 --- a/src/main/webapp/editdatafiles.xhtml +++ b/src/main/webapp/editdatafiles.xhtml @@ -20,6 +20,7 @@ + @@ -44,7 +45,7 @@ -
+
@@ -95,36 +96,37 @@
- - + + + +
-
- - files buttons edit datafiles -
- - - -
+ +
+ +
+ + + +
+ +
-
- - - - - - - + + + +
+
-
-
+
+ diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index b19ba58fd23..5f734eeff4c 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -90,18 +90,17 @@ - count
- files fragment
+ + + and !widgetWrapper.widgetView }"> From d0e453716ee904a66de1c2a6dfe6f41d6f2c31d9 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 13 Jul 2018 13:50:03 -0400 Subject: [PATCH 004/701] #4610 remove unused code; fix broken test --- src/main/webapp/dataset.xhtml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index c101ab6fcec..1dea5e88dc2 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -1153,7 +1153,7 @@
- + @@ -1202,7 +1202,7 @@
- +

From fb934f27144bdd96b927c9914d6658b4bfab0eac Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 17 Jul 2018 14:40:58 -0400 Subject: [PATCH 005/701] #4610 Resolve conflicts fix Download --- .../java/edu/harvard/iq/dataverse/DatasetVersion.java | 10 ++++++++++ src/main/webapp/dataset.xhtml | 5 +++-- src/main/webapp/dataverse.xhtml | 4 ++-- src/main/webapp/editFilesFragment.xhtml | 2 +- src/main/webapp/editdatafiles.xhtml | 2 +- src/main/webapp/filesFragment.xhtml | 9 +++++---- src/main/webapp/metadataFragment.xhtml | 8 ++++---- src/main/webapp/themeAndWidgetsFragment.xhtml | 4 ++-- 8 files changed, 28 insertions(+), 16 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 876da73d619..da07abe1526 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -478,6 +478,16 @@ public boolean isMinorUpdate() { return true; } + public boolean isHasPackageFile(){ + if (this.fileMetadatas.isEmpty()){ + return false; + } + if(this.fileMetadatas.size() > 1){ + return false; + } + return this.fileMetadatas.get(0).getDataFile().getContentType().equals("application/vnd.dataverse.file-package"); + } + public void updateDefaultValuesFromTemplate(Template template) { if (!template.getDatasetFields().isEmpty()) { this.setDatasetFields(this.copyDatasetFields(template.getDatasetFields())); diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml index be02be3dd81..c98a0600f70 100755 --- a/src/main/webapp/dataset.xhtml +++ b/src/main/webapp/dataset.xhtml @@ -198,13 +198,14 @@ #{bundle['dataset.editBtn']}

- +
- +
diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index f681d02a830..71649e34fee 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -612,7 +612,7 @@

#{bundle['file.deleteFileDialog.failed.tip']}

-
diff --git a/src/main/webapp/editdatafiles.xhtml b/src/main/webapp/editdatafiles.xhtml index 6238ca5a091..f993d09394b 100755 --- a/src/main/webapp/editdatafiles.xhtml +++ b/src/main/webapp/editdatafiles.xhtml @@ -103,7 +103,7 @@
- +
diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml index a1b95d5e4bf..40dc7a61fc5 100644 --- a/src/main/webapp/filesFragment.xhtml +++ b/src/main/webapp/filesFragment.xhtml @@ -111,9 +111,10 @@ + and !widgetWrapper.widgetView + and !DatasetPage.workingVersion.hasPackageFile }"> @@ -364,7 +365,7 @@ -
+
@@ -378,7 +379,7 @@
-
+
diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index 524f6fdf6c5..609c998fb3f 100755 --- a/src/main/webapp/metadataFragment.xhtml +++ b/src/main/webapp/metadataFragment.xhtml @@ -104,7 +104,7 @@ - +
- +
@@ -222,7 +222,7 @@
-
+
diff --git a/src/main/webapp/themeAndWidgetsFragment.xhtml b/src/main/webapp/themeAndWidgetsFragment.xhtml index 315ae30376e..f188cdc4514 100644 --- a/src/main/webapp/themeAndWidgetsFragment.xhtml +++ b/src/main/webapp/themeAndWidgetsFragment.xhtml @@ -7,7 +7,7 @@ - +
@@ -178,7 +178,7 @@
-
+
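Commit 005 above gates several download controls on the new DatasetVersion.isHasPackageFile() check. The rule it encodes is small enough to state in isolation: a version "has a package file" only when it contains exactly one file and that file's content type is the Dataverse file-package MIME type. A sketch under that assumption, with plain content-type strings standing in for the real FileMetadata objects:

import java.util.Arrays;
import java.util.List;

public class PackageFileCheckSketch {
    static final String PACKAGE_MIME = "application/vnd.dataverse.file-package";

    // Mirrors the logic of isHasPackageFile(): empty versions and versions
    // with more than one file are never package deposits.
    static boolean hasPackageFile(List<String> fileContentTypes) {
        if (fileContentTypes.size() != 1) {
            return false;
        }
        return PACKAGE_MIME.equals(fileContentTypes.get(0));
    }

    public static void main(String[] args) {
        System.out.println(hasPackageFile(Arrays.asList(PACKAGE_MIME)));             // true
        System.out.println(hasPackageFile(Arrays.asList("text/csv", PACKAGE_MIME))); // false
    }
}

By JavaBeans convention the boolean getter isHasPackageFile() surfaces in JSF EL as the property hasPackageFile, which is why the XHTML guards above read DatasetPage.workingVersion.hasPackageFile.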
From b5f81e3208460b1964cfd4419e682a866f64633e Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 17 Jul 2018 15:00:30 -0400 Subject: [PATCH 006/701] #4610 fix bundle conflict --- src/main/java/Bundle.properties | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 09b45dc0b66..3740f9a3f09 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1310,13 +1310,8 @@ dataset.privateurl.noPermToCreate=To create a Private URL you must have the foll file.count={0} {0, choice, 0#Files|1#File|2#Files} file.count.selected={0} {0, choice, 0#Files Selected|1#File Selected|2#Files Selected} file.selectToAddBtn=Select Files to Add -<<<<<<< HEAD -file.selectToAdd.tipLimit=File upload limit is {0} bytes per file. -file.selectToAdd.tipMoreInformation=Select or drag and drop files from your desktop, directly into the upload widget. -======= file.selectToAdd.tipLimit=File upload limit is {0} per file. file.selectToAdd.tipMoreInformation=For more information about supported file formats, please refer to the User Guide. ->>>>>>> develop file.selectToAdd.dragdropMsg=Drag and drop files here. file.createUploadDisabled=Once you have saved your dataset, you can upload your data using the "Upload Files" button on the dataset page. For more information about supported file formats, please refer to the User Guide. file.fromDropbox=Upload from Dropbox From e4a6eede17bfbe89e01e52ec832f6425cf52fac6 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 17 Jul 2018 17:00:53 -0400 Subject: [PATCH 007/701] #4610 backing out autoupdate changes --- src/main/webapp/dataverse.xhtml | 4 ++-- src/main/webapp/metadataFragment.xhtml | 8 ++++---- src/main/webapp/themeAndWidgetsFragment.xhtml | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/main/webapp/dataverse.xhtml b/src/main/webapp/dataverse.xhtml index 18d37032776..d501099b561 100644 --- a/src/main/webapp/dataverse.xhtml +++ b/src/main/webapp/dataverse.xhtml @@ -131,7 +131,7 @@
- +
- +
diff --git a/src/main/webapp/metadataFragment.xhtml b/src/main/webapp/metadataFragment.xhtml index 609c998fb3f..524f6fdf6c5 100755 --- a/src/main/webapp/metadataFragment.xhtml +++ b/src/main/webapp/metadataFragment.xhtml @@ -104,7 +104,7 @@ - +
- +
@@ -222,7 +222,7 @@
-
+
diff --git a/src/main/webapp/themeAndWidgetsFragment.xhtml b/src/main/webapp/themeAndWidgetsFragment.xhtml index f188cdc4514..315ae30376e 100644 --- a/src/main/webapp/themeAndWidgetsFragment.xhtml +++ b/src/main/webapp/themeAndWidgetsFragment.xhtml @@ -7,7 +7,7 @@ - +
@@ -178,7 +178,7 @@
-
+
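The next commit documents SystemConfig.FileUploadMethods and renames the NATIVE constant's token to "native/http". The enum-with-token pattern it relies on, shown in isolation (a sketch trimmed to two constants):

public class UploadMethodEnumSketch {
    enum FileUploadMethods {
        RSYNC("dcm/rsync+ssh"),
        NATIVE("native/http");

        // the token stored in the :UploadMethods database setting
        private final String text;

        FileUploadMethods(String text) {
            this.text = text;
        }

        @Override
        public String toString() {
            return text; // the setting token, not the constant name
        }
    }

    public static void main(String[] args) {
        System.out.println(FileUploadMethods.NATIVE); // prints "native/http"
    }
}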
From 3ba00366b83cbd5acc58c8b3160c4482e88a8e54 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 18 Jul 2018 10:29:21 -0400 Subject: [PATCH 008/701] rename "NATIVE" to "native/http" and update javadoc #4610 --- .../harvard/iq/dataverse/util/SystemConfig.java | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 5de06559a27..c6dede2e2d8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -826,15 +826,21 @@ public int getPVNumberOfConsecutiveDigitsAllowed() { * * - TransferProtocols * - * There is a good chance these will be consolidated in the future. The word - * "NATIVE" is a bit of placeholder term to mean how Dataverse has - * traditionally handled files, which tends to involve users uploading and - * downloading files using a browser or APIs. + * There is a good chance these will be consolidated in the future. */ public enum FileUploadMethods { + /** + * DCM stands for Data Capture Module. Right now it supports upload over + * rsync+ssh but DCM may support additional methods in the future. + */ RSYNC("dcm/rsync+ssh"), - NATIVE("NATIVE"); + // TODO: Add "native/dropbox"? + /** + * Traditional Dataverse file handling, which tends to involve users + * uploading and downloading files using a browser or APIs. + */ + NATIVE("native/http"); private final String text; From 2fe2e545deede0e972ce4aed0a140cc4a12427d1 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 19 Jul 2018 12:08:22 -0400 Subject: [PATCH 009/701] #4610 support use of multiple upload methods --- .../harvard/iq/dataverse/SettingsWrapper.java | 8 +++++ .../iq/dataverse/TemplateServiceBean.java | 19 ++++------- .../iq/dataverse/util/SystemConfig.java | 32 ++++++++++++++++--- src/main/webapp/editFilesFragment.xhtml | 4 +-- 4 files changed, 44 insertions(+), 19 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index 2559cd6bc8b..19177d81216 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -150,6 +150,14 @@ public boolean isRsyncDownload() { return systemConfig.isRsyncDownload(); } + public boolean isHTTPUpload(){ + return systemConfig.isHTTPUpload(); + } + + public boolean isDropBoxUpload(){ + return systemConfig.isDropBoxUpload(); + } + public boolean isDataFilePIDSequentialDependent(){ return systemConfig.isDataFilePIDSequentialDependent(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java index 66c2d35fe61..dcbb65443f6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/TemplateServiceBean.java @@ -55,17 +55,12 @@ public List findDataversesByDefaultTemplateId(Long defaultTemplateId) } public void incrementUsageCount(Long templateId) { - // Use object instead of Native Queries to update count - //also catch/log exceptions so that create dataset will succeed - //Issue 4847 SEK 7/16/2018 - try { - Template toUpdate = em.find(Template.class, templateId); - Long usage = toUpdate.getUsageCount(); - usage++; - toUpdate.setUsageCount(usage); - em.merge(toUpdate); - } catch (Exception e) { - logger.log(Level.INFO, 
"Increment usage count failed on template id {0}. ", templateId); - } + + Template toUpdate = em.find(Template.class, templateId); + Long usage = toUpdate.getUsageCount(); + usage++; + toUpdate.setUsageCount(usage); + em.merge(toUpdate); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index c6dede2e2d8..28a7835595e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -835,12 +835,17 @@ public enum FileUploadMethods { * rsync+ssh but DCM may support additional methods in the future. */ RSYNC("dcm/rsync+ssh"), - // TODO: Add "native/dropbox"? /** * Traditional Dataverse file handling, which tends to involve users * uploading and downloading files using a browser or APIs. */ - NATIVE("native/http"); + NATIVE("native/http"), + /** + * Traditional Dataverse file handling, which tends to involve users + * uploading and downloading files using a browser or APIs. + */ + + DROPBOX("native/dropbox"); private final String text; @@ -967,14 +972,31 @@ public boolean isPublicInstall(){ } public boolean isRsyncUpload(){ - String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); - return uploadMethods != null && uploadMethods.toLowerCase().equals(SystemConfig.FileUploadMethods.RSYNC.toString()); + return getUploadMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString()); + } + + public boolean isHTTPUpload(){ + return getUploadMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString()); } + public boolean isDropBoxUpload(){ + return getUploadMethodAvailable(SystemConfig.FileUploadMethods.DROPBOX.toString()); + } + + public boolean isRsyncDownload() { String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods); - return downloadMethods !=null && downloadMethods.toLowerCase().equals(SystemConfig.FileDownloadMethods.RSYNC.toString()); + return downloadMethods !=null && downloadMethods.toLowerCase().contains(SystemConfig.FileDownloadMethods.RSYNC.toString()); + } + + private Boolean getUploadMethodAvailable(String method){ + String uploadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.UploadMethods); + if (uploadMethods==null){ + return false; + } else { + return Arrays.asList(uploadMethods.toLowerCase().split("\\s*,\\s*")).contains(method); + } } public boolean isDataFilePIDSequentialDependent(){ diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 71649e34fee..174acf3dfc6 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -27,7 +27,7 @@ -
+
@@ -228,7 +228,7 @@
- +
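Commit 009 above swaps the single-value equals() comparison for getUploadMethodAvailable(), so :UploadMethods can list several methods at once. The same parsing logic as a runnable sketch:

import java.util.Arrays;

public class UploadMethodsSettingSketch {
    // Mirrors getUploadMethodAvailable(): split the setting on commas with
    // optional surrounding whitespace, then test membership.
    static boolean isMethodAvailable(String uploadMethodsSetting, String method) {
        if (uploadMethodsSetting == null) {
            return false;
        }
        return Arrays.asList(uploadMethodsSetting.toLowerCase().split("\\s*,\\s*"))
                .contains(method);
    }

    public static void main(String[] args) {
        String setting = "native/http, dcm/rsync+ssh";
        System.out.println(isMethodAvailable(setting, "native/http"));    // true
        System.out.println(isMethodAvailable(setting, "native/dropbox")); // false
    }
}

Splitting on \\s*,\\s* makes "native/http, dcm/rsync+ssh" and "native/http,dcm/rsync+ssh" behave identically.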
From ca96daca966edf67e9cad7900adfdbbadbf08093 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 19 Jul 2018 14:07:07 -0400 Subject: [PATCH 010/701] #4610 bundle-ize file upload messages --- src/main/java/Bundle.properties | 5 +++++ src/main/webapp/editFilesFragment.xhtml | 12 ++++++------ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 3740f9a3f09..711f6e2643d 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1316,6 +1316,11 @@ file.selectToAdd.dragdropMsg=Drag and drop files here. file.createUploadDisabled=Once you have saved your dataset, you can upload your data using the "Upload Files" button on the dataset page. For more information about supported file formats, please refer to the User Guide. file.fromDropbox=Upload from Dropbox file.fromDropbox.tip=Files can also be uploaded directly from Dropbox. +file.fromDropbox.help=Help message providing value and info about Dropbox to the user. +file.fromRsync=Upload with rsync+ssh via Data Capture Module (DCM) +file.fromRsync.help=Help message providing value and info about Rsync to the user. +file.fromHTTP=Upload with HTTP via your browser +file.fromHTTP.help=Help message providing value and info about HTTP upload to the user. file.replace.original=Original File file.editFiles=Edit Files file.editFilesSelected=Edit diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 174acf3dfc6..be106e966f5 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -34,7 +34,7 @@ @@ -45,7 +45,7 @@

- Help message providing value and info about HTTP upload to the user. + #{bundle['file.fromHTTP.help']}

@@ -236,7 +236,7 @@

@@ -244,7 +244,7 @@

- Help message providing value and info about Dropbox to the user. + #{bundle['file.fromDropbox.help']}

@@ -268,7 +268,7 @@ @@ -276,7 +276,7 @@

- Help message providing value and info about rsync to the user. + #{bundle['file.fromRsync.help']}

Warning message about how this option is not available because of uploaded files to this dataset. From 939ad70d80b61a9abd339cfd8e2d32ebc3c240eb Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 19 Jul 2018 14:31:57 -0400 Subject: [PATCH 011/701] ensure "native" upload is enabled for new install or upgrade #4610 --- scripts/api/setup-all.sh | 1 + scripts/database/upgrades/upgrade_v4.9.1_to_v4.9.2.sql | 1 + 2 files changed, 2 insertions(+) create mode 100644 scripts/database/upgrades/upgrade_v4.9.1_to_v4.9.2.sql diff --git a/scripts/api/setup-all.sh b/scripts/api/setup-all.sh index 03cb2252f2c..b76742ab555 100755 --- a/scripts/api/setup-all.sh +++ b/scripts/api/setup-all.sh @@ -54,6 +54,7 @@ curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder" curl -X PUT -d EZID "$SERVER/admin/settings/:DoiProvider" curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy +curl -X PUT -d 'native/http' $SERVER/admin/settings/:UploadMethods echo echo "Setting up the admin user (and as superuser)" diff --git a/scripts/database/upgrades/upgrade_v4.9.1_to_v4.9.2.sql b/scripts/database/upgrades/upgrade_v4.9.1_to_v4.9.2.sql new file mode 100644 index 00000000000..a7a7ddeb814 --- /dev/null +++ b/scripts/database/upgrades/upgrade_v4.9.1_to_v4.9.2.sql @@ -0,0 +1 @@ +INSERT INTO setting(name, content) VALUES (':UploadMethods', 'native/http'); From 0a6e2836630f65feebc41789442264b1dc41e813 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 19 Jul 2018 15:07:19 -0400 Subject: [PATCH 012/701] if HTTP upload is disabled via GUI, disable via API also #4610 --- src/main/java/Bundle.properties | 1 + src/main/java/edu/harvard/iq/dataverse/api/Datasets.java | 6 +++++- src/main/java/edu/harvard/iq/dataverse/api/Files.java | 5 ++++- .../dataverse/api/datadeposit/MediaResourceManagerImpl.java | 4 ++++ .../java/edu/harvard/iq/dataverse/util/SystemConfig.java | 1 + 5 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 711f6e2643d..0cdf5930eb3 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1321,6 +1321,7 @@ file.fromRsync=Upload with rsync+ssh via Data Capture Module (DCM) file.fromRsync.help=Help message providing value and info about Rsync to the user. file.fromHTTP=Upload with HTTP via your browser file.fromHTTP.help=Help message providing value and info about HTTP upload to the user. +file.api.httpDisabled=File upload via HTTP is not available for this installation of Dataverse. 
file.replace.original=Original File file.editFiles=Edit Files file.editFilesSelected=Edit diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index fd547b800dd..3e70c78cc2a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -72,6 +72,7 @@ import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.EjbUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.json.JsonParseException; @@ -1127,7 +1128,10 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, @FormDataParam("file") final FormDataBodyPart formDataBodyPart ){ - + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } + // ------------------------------------- // (1) Get the user from the API key // ------------------------------------- diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 190c5e49c52..2cfe2c679e7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -151,7 +151,10 @@ public Response replaceFileInDataset( @FormDataParam("file") FormDataContentDisposition contentDispositionHeader, @FormDataParam("file") final FormDataBodyPart formDataBodyPart ){ - + + if (!systemConfig.isHTTPUpload()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } // ------------------------------------- // (1) Get the user from the API key // ------------------------------------- diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index e28a21a71d2..25f8d1eb1c0 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.ByteArrayInputStream; @@ -200,6 +201,9 @@ public DepositReceipt addResource(String uri, Deposit deposit, AuthCredentials a } DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials authCredentials, SwordConfiguration swordConfiguration, boolean shouldReplace) throws SwordError, SwordAuthException, SwordServerException { + if (!systemConfig.isHTTPUpload()) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, BundleUtil.getStringFromBundle("file.api.httpDisabled")); + } AuthenticatedUser user = swordAuth.auth(authCredentials); DataverseRequest dvReq = new DataverseRequest(user, httpRequest); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 28a7835595e..6bc67191d51 
100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -975,6 +975,7 @@ public boolean isRsyncUpload(){ return getUploadMethodAvailable(SystemConfig.FileUploadMethods.RSYNC.toString()); } + // Controls if HTTP upload is enabled for both GUI and API. public boolean isHTTPUpload(){ return getUploadMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString()); } From 4a7fcf52711127593e593346f451a0a6be822145 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 19 Jul 2018 15:20:32 -0400 Subject: [PATCH 013/701] remove help messages that aren't providing value #4610 --- src/main/java/Bundle.properties | 3 --- src/main/webapp/editFilesFragment.xhtml | 11 ----------- 2 files changed, 14 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 0cdf5930eb3..cd8af544785 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1316,11 +1316,8 @@ file.selectToAdd.dragdropMsg=Drag and drop files here. file.createUploadDisabled=Once you have saved your dataset, you can upload your data using the "Upload Files" button on the dataset page. For more information about supported file formats, please refer to the User Guide. file.fromDropbox=Upload from Dropbox file.fromDropbox.tip=Files can also be uploaded directly from Dropbox. -file.fromDropbox.help=Help message providing value and info about Dropbox to the user. file.fromRsync=Upload with rsync+ssh via Data Capture Module (DCM) -file.fromRsync.help=Help message providing value and info about Rsync to the user. file.fromHTTP=Upload with HTTP via your browser -file.fromHTTP.help=Help message providing value and info about HTTP upload to the user. file.api.httpDisabled=File upload via HTTP is not available for this installation of Dataverse. file.replace.original=Original File file.editFiles=Edit Files diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index be106e966f5..639faf11c6d 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -44,10 +44,6 @@

-

- #{bundle['file.fromHTTP.help']} -

-

-

- #{bundle['file.fromDropbox.help']} -

-
@@ -275,9 +267,6 @@
-

- #{bundle['file.fromRsync.help']} -

Warning message explaining that this option is not available because files have already been uploaded to this dataset.

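Commit 012 above closes out the API side of the feature: every HTTP file-upload path (native add-file, file replace, and SWORD) now fails fast with 503 Service Unavailable and the file.api.httpDisabled bundle message when HTTP upload is switched off. A simplified stand-in for that guard, using plain types rather than the real JAX-RS response classes:

import java.util.Optional;

public class UploadGuardSketch {
    interface Config {
        boolean isHTTPUpload();
    }

    // Refuse the request before reading any file data; an Optional error
    // message stands in for the 503 Response the real endpoints return.
    static Optional<String> checkUploadAllowed(Config config) {
        if (!config.isHTTPUpload()) {
            return Optional.of("File upload via HTTP is not available for this installation of Dataverse.");
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        Config httpUploadDisabled = () -> false;
        checkUploadAllowed(httpUploadDisabled).ifPresent(System.out::println);
    }
}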
From 1aab2026a9b6a83298b8d5de5095338cf6b411a2 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 19 Jul 2018 15:36:59 -0400 Subject: [PATCH 014/701] update guides to reflect dual mode #4610 --- .../source/developers/big-data-support.rst | 2 +- .../source/installation/config.rst | 18 +++++++++++++++--- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/developers/big-data-support.rst b/doc/sphinx-guides/source/developers/big-data-support.rst index 501c53a3bb5..6cb413ae738 100644 --- a/doc/sphinx-guides/source/developers/big-data-support.rst +++ b/doc/sphinx-guides/source/developers/big-data-support.rst @@ -21,7 +21,7 @@ Installation instructions can be found at https://github.com/sbgrid/data-capture Once you have installed a DCM, you will need to configure two database settings on the Dataverse side. These settings are documented in the :doc:`/installation/config` section of the Installation Guide: - ``:DataCaptureModuleUrl`` should be set to the URL of a DCM you installed. -- ``:UploadMethods`` should be set to ``dcm/rsync+ssh``. +- ``:UploadMethods`` should include ``dcm/rsync+ssh``. This will allow your Dataverse installation to communicate with your DCM, so that Dataverse can download rsync scripts for your users. diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index b8d4678ac0f..d772e3d1cc9 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -26,6 +26,8 @@ The :doc:`/api/native-api` contains a useful but potentially dangerous API endpo By default, all APIs can be operated on remotely and a number of endpoints do not require authentication. https://github.com/IQSS/dataverse/issues/1886 was opened to explore changing these defaults, but until then it is very important to block both the "admin" endpoint (and at least consider blocking ``builtin-users``). For details please see also the section on ``:BlockedApiPolicy`` below. +It's also possible to prevent file uploads via API by adjusting the ``:UploadMethods`` database setting. + Forcing HTTPS +++++++++++++ @@ -520,7 +522,7 @@ Configuration for :doc:`r-rapache-tworavens`. dataverse.dropbox.key +++++++++++++++++++++ -Dropbox provides a Chooser app, which is a Javascript component that allows you to upload files to Dataverse from Dropbox. It is an optional configuration setting, which requires you to pass it an app key. For more information on setting up your Chooser app, visit https://www.dropbox.com/developers/chooser. +Dropbox provides a Chooser app, which is a Javascript component that allows you to upload files to Dataverse from Dropbox. It is an optional configuration setting, which requires you to pass it an app key and configure the ``:UploadMethods`` database setting. For more information on setting up your Chooser app, visit https://www.dropbox.com/developers/chooser. ``./asadmin create-jvm-options "-Ddataverse.dropbox.key={{YOUR_APP_KEY}}"`` @@ -1250,9 +1252,19 @@ The URL for your Repository Storage Abstraction Layer (RSAL) installation. This :UploadMethods ++++++++++++++ -This setting is experimental and to be used with the Data Capture Module (DCM). For now, if you set the upload methods to ``dcm/rsync+ssh`` it will allow your users to download rsync scripts from the DCM. +This setting controls which upload methods are available to users of your installation of Dataverse. 
The following upload methods are available: + +- ``native/http``: Corresponds to "Upload with HTTP via your browser" and APIs that use HTTP (SWORD and native). +- ``native/dropbox``: Corresponds to "Upload with Dropbox". Note that the JVM option ``dataverse.dropbox.key`` is also required. +- ``dcm/rsync+ssh``: Corresponds to "Upload with rsync+ssh via Data Capture Module (DCM)". A lot of setup is required, as explained in the :doc:`/developers/big-data-support` section of the Dev Guide. + +Out of the box only ``native/http`` is enabled and will work without further configuration. To add multiple upload method, separate them using a comma like this: + +``curl -X PUT -d 'native/http,native/dropbox,dcm/rsync+ssh' http://localhost:8080/api/admin/settings/:UploadMethods`` + +You'll always want at least one upload method, so the easiest way to remove one of them is to simply ``PUT`` just the one you want, like this: -``curl -X PUT -d 'dcm/rsync+ssh' http://localhost:8080/api/admin/settings/:UploadMethods`` +``curl -X PUT -d 'native/http' http://localhost:8080/api/admin/settings/:UploadMethods`` :DownloadMethods ++++++++++++++++ From 8830927926dc09422f852765b2922734de9dd430 Mon Sep 17 00:00:00 2001 From: oscardssmith Date: Fri, 20 Jul 2018 09:10:10 -0400 Subject: [PATCH 015/701] more speedups --- .../iq/dataverse/PermissionServiceBean.java | 279 ++++++++++++------ .../impl/ListDataverseContentCommand.java | 36 ++- 2 files changed, 208 insertions(+), 107 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 2803f8d4ceb..9432cea45ab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -31,6 +31,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.util.BundleUtil; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.LinkedList; import java.util.logging.Level; @@ -50,15 +51,15 @@ public class PermissionServiceBean { private static final Logger logger = Logger.getLogger(PermissionServiceBean.class.getName()); - - private static final Set PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY = - EnumSet.copyOf(Arrays.asList(Permission.values()).stream() - .filter( Permission::requiresAuthenticatedUser ) - .collect( Collectors.toList() )); - + + private static final Set PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY + = EnumSet.copyOf(Arrays.asList(Permission.values()).stream() + .filter(Permission::requiresAuthenticatedUser) + .collect(Collectors.toList())); + @EJB BuiltinUserServiceBean userService; - + @EJB AuthenticationServiceBean authenticationService; @@ -66,8 +67,8 @@ public class PermissionServiceBean { DataverseRoleServiceBean roleService; @EJB - RoleAssigneeServiceBean roleAssigneeService; - + RoleAssigneeServiceBean roleAssigneeService; + @EJB DataverseServiceBean dataverseService; @@ -76,17 +77,18 @@ public class PermissionServiceBean { @EJB GroupServiceBean groupService; - + @Inject DataverseSession session; - + @Inject DataverseRequestServiceBean dvRequestService; - + /** * A request-level permission query (e.g includes IP groups). 
*/ public class RequestPermissionQuery { + final DvObject subject; final DataverseRequest request; @@ -94,27 +96,31 @@ public RequestPermissionQuery(DvObject subject, DataverseRequest request) { this.subject = subject; this.request = request; } - + public Set get() { - return PermissionServiceBean.this.permissionsFor(request, subject); + return permissionsFor(request, subject); } public boolean has(Permission p) { - return get().contains(p); + return hasPermissionsFor(request, subject, p); } - - public RequestPermissionQuery on( DvObject dvo ) { + + public RequestPermissionQuery on(DvObject dvo) { return new RequestPermissionQuery(dvo, request); } - + /** - * Tests whether a command of the passed class can be issued over the {@link DvObject} - * in the context of the current request. Note that since some commands have dynamic permissions, - * in some cases it's better to instantiate a command object and pass it to {@link #canIssue(edu.harvard.iq.dataverse.engine.command.Command)}. + * Tests whether a command of the passed class can be issued over the + * {@link DvObject} in the context of the current request. Note that + * since some commands have dynamic permissions, in some cases it's + * better to instantiate a command object and pass it to + * {@link #canIssue(edu.harvard.iq.dataverse.engine.command.Command)}. + * * @param aCmdClass - * @return {@code true} iff instances of the command class can be issued in the context of the current request. + * @return {@code true} iff instances of the command class can be issued + * in the context of the current request. */ - public boolean canIssue( Class aCmdClass ) { + public boolean canIssue(Class aCmdClass) { Map> required = CH.permissionsRequired(aCmdClass); if (required.isEmpty() || required.get("") == null) { logger.fine("IsUserAllowedOn: empty-true"); @@ -125,14 +131,16 @@ public boolean canIssue( Class aCmdClass ) { return grantedUserPermissions.containsAll(requiredPermissionSet); } } - + /** - * Tests whether the command can be issued over the {@link DvObject} - * in the context of the current request. + * Tests whether the command can be issued over the {@link DvObject} in + * the context of the current request. + * * @param aCmd - * @return {@code true} iff the command can be issued in the context of the current request. + * @return {@code true} iff the command can be issued in the context of + * the current request. */ - public boolean canIssue( Command aCmd ) { + public boolean canIssue(Command aCmd) { Map> required = aCmd.getRequiredPermissions(); if (required.isEmpty() || required.get("") == null) { logger.fine("IsUserAllowedOn: empty-true"); @@ -144,9 +152,10 @@ public boolean canIssue( Command aCmd ) { } } } - + /** - * A permission query for a given role assignee. Does not cover request-level permissions. + * A permission query for a given role assignee. Does not cover + * request-level permissions. */ public class StaticPermissionQuery { @@ -164,7 +173,8 @@ public StaticPermissionQuery user(RoleAssignee anotherUser) { /** * "Fast and loose" query mechanism, allowing to pass the command class - * name, does not take request-level permissions into account. Command is assumed to live in + * name, does not take request-level permissions into account. 
Command + * is assumed to live in * {@code edu.harvard.iq.dataverse.engine.command.impl.} * * @deprecated Use DynamicPermissionQuery instead @@ -188,73 +198,133 @@ public boolean has(Permission p) { } public boolean has(String pName) { - return get().contains(Permission.valueOf(pName)); + return has(Permission.valueOf(pName)); } } + public Collection userPermissions(User user, DvObject dvo){ + String powerfull_roles = "select id from dataverserole where (permissionbits&12)!=0"; + List roles = em.createNativeQuery(powerfull_roles).getResultList(); + String x = "select id from dataverserole where (permissionbits&12)!=0"; + return null; + } public List assignmentsOn(DvObject d) { return em.createNamedQuery("RoleAssignment.listByDefinitionPointId", RoleAssignment.class) .setParameter("definitionPointId", d.getId()).getResultList(); } - + /** - * Finds all the permissions the {@link User} in {@code req} has over + * Finds all the permissions the {@link User} in {@code req} has over * {@code dvo}, in the context of {@code req}. - * @param req + * + * @param req * @param dvo * @return Permissions of {@code req.getUser()} over {@code dvo}. */ - public Set permissionsFor( DataverseRequest req, DvObject dvo ) { - Set permissions = EnumSet.noneOf(Permission.class); - - // Add permissions specifically given to the user - permissions.addAll( permissionsForSingleRoleAssignee(req.getUser(),dvo) ); - - Set groups = groupService.groupsFor(req,dvo); - + public Set permissionsFor(DataverseRequest req, DvObject dvo) { + // Start with permissions specifically given to the user + Set permissions = permissionsForSingleRoleAssignee(req.getUser(), dvo); + // Add permissions gained from groups - for ( Group g : groups ) { - final Set groupPremissions = permissionsForSingleRoleAssignee(g,dvo); - permissions.addAll(groupPremissions); + for (Group g : groupService.groupsFor(req, dvo)) { + permissionsForSingleRoleAssignee(g, dvo, permissions); } - if ( ! req.getUser().isAuthenticated() ) { - permissions.removeAll( PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY ); + if (!req.getUser().isAuthenticated()) { + permissions.removeAll(PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY); } - + return permissions; } - + + public boolean hasPermissionsFor(DataverseRequest req, DvObject dvo, Permission p) { + if (!req.getUser().isAuthenticated() && PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY.contains(p)) { + return false; + } + + // Start with permissions specifically given to the user + if (hasPermissionsForSingleRoleAssignee(req.getUser(), dvo, p)) { + return true; + } + + // Add permissions gained from groups + for (Group g : groupService.groupsFor(req, dvo)) { + if (hasPermissionsForSingleRoleAssignee(g, dvo, p)) { + return true; + } + } + + return false; + } + + private boolean hasPermissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, Permission p) { + // super user check + // for 4.0, we are allowing superusers all permissions + // for secure data, we may need to restrict some of the permissions + if (ra instanceof AuthenticatedUser && ((AuthenticatedUser) ra).isSuperuser()) { + return true; + } + // File special case. + if (d instanceof DataFile && p.equals(Permission.DownloadFile)) { + // unrestricted files that are part of a release dataset + // automatically get download permission for everybody: + // -- L.A. 
4.0 beta12 + + DataFile df = (DataFile) d; + + if (!df.isRestricted()) { + if (df.getOwner().getReleasedVersion() != null) { + if (df.getOwner().getReleasedVersion().getFileMetadatas() != null) { + return true; + } + } + } + } + + // Direct assignments to ra on d + for (RoleAssignment asmnt : assignmentsFor(ra, d)) { + if (asmnt.getRole().permissions().contains(p)) { + return true; + } + } + + // Recurse up the group containment hierarchy. + for (Group grp : groupService.groupsFor(ra, d)) { + if (hasPermissionsForSingleRoleAssignee(grp, d, p)) { + return true; + } + } + return false; + } + /** - * Returns the set of permission a user/group has over a dataverse object. + * Returns the set of permission a user/group has over a dataverse object. * This method takes into consideration group memberships as well, but does * not look into request-level groups. + * * @param ra The role assignee. * @param dvo The {@link DvObject} on which the user wants to operate * @return the set of permissions {@code ra} has over {@code dvo}. */ public Set permissionsFor(RoleAssignee ra, DvObject dvo) { - Set permissions = EnumSet.noneOf(Permission.class); - - // Add permissions specifically given to the user - permissions.addAll( permissionsForSingleRoleAssignee(ra,dvo) ); - + // Start with permissions specifically given to the user + Set permissions = permissionsForSingleRoleAssignee(ra, dvo); + // Add permissions gained from groups - Set groupsRaBelongsTo = groupService.groupsFor(ra,dvo); - for ( Group g : groupsRaBelongsTo ) { - permissions.addAll( permissionsForSingleRoleAssignee(g,dvo) ); + Set groupsRaBelongsTo = groupService.groupsFor(ra, dvo); + for (Group g : groupsRaBelongsTo) { + permissionsForSingleRoleAssignee(g, dvo, permissions); } - - if ( (ra instanceof User) && (! ((User)ra).isAuthenticated()) ) { - permissions.removeAll( PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY ); + + if ((ra instanceof User) && (!((User) ra).isAuthenticated())) { + permissions.removeAll(PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY); } - + return permissions; } - private Set permissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d) { // super user check // for 4.0, we are allowing superusers all permissions @@ -262,18 +332,28 @@ private Set permissionsForSingleRoleAssignee(RoleAssignee ra, DvObje if (ra instanceof AuthenticatedUser && ((AuthenticatedUser) ra).isSuperuser()) { return EnumSet.allOf(Permission.class); } - + // Start with no permissions, build from there. Set retVal = EnumSet.noneOf(Permission.class); + return permissionsForSingleRoleAssignee(ra, d, retVal); + } + private Set permissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, Set retVal) { + // super user check + // for 4.0, we are allowing superusers all permissions + // for secure data, we may need to restrict some of the permissions + if (ra instanceof AuthenticatedUser && ((AuthenticatedUser) ra).isSuperuser()) { + retVal.addAll(EnumSet.allOf(Permission.class)); + return retVal; + } // File special case. if (d instanceof DataFile) { // unrestricted files that are part of a release dataset // automatically get download permission for everybody: // -- L.A. 
4.0 beta12 - - DataFile df = (DataFile)d; - + + DataFile df = (DataFile) d; + if (!df.isRestricted()) { if (df.getOwner().getReleasedVersion() != null) { if (df.getOwner().getReleasedVersion().getFileMetadatas() != null) { @@ -287,21 +367,22 @@ private Set permissionsForSingleRoleAssignee(RoleAssignee ra, DvObje } } } - + // Direct assignments to ra on d - assignmentsFor(ra, d).forEach( + assignmentsFor(ra, d).forEach( asmnt -> retVal.addAll(asmnt.getRole().permissions()) ); - + // Recurse up the group containment hierarchy. groupService.groupsFor(ra, d).forEach( - grp -> retVal.addAll(permissionsForSingleRoleAssignee(grp, d))); + grp -> permissionsForSingleRoleAssignee(grp, d, retVal)); return retVal; } /** * Returns all the role assignments that are effective for {@code ra} over * {@code d}. Traverses the containment hierarchy of the {@code d}. + * * @param ra The role assignee whose role assignemnts we look for. * @param d The dataverse object over which the roles are assigned * @return A set of all the role assignments for {@code ra} over {@code d}. @@ -321,15 +402,19 @@ public Set assignmentsFor(RoleAssignee ra, DvObject d) { } /** - * For commands with no named dvObjects, this allows a quick check whether - * a user can issue the command on the dataverse or not. + * For commands with no named dvObjects, this allows a quick check whether a + * user can issue the command on the dataverse or not. * * @param u * @param commandClass * @param dvo * @return - * @deprecated As commands have dynamic permissions now, it is not enough to look at the static permissions anymore. - * @see #isUserAllowedOn(edu.harvard.iq.dataverse.authorization.RoleAssignee, edu.harvard.iq.dataverse.engine.command.Command, edu.harvard.iq.dataverse.DvObject) + * @deprecated As commands have dynamic permissions now, it is not enough to + * look at the static permissions anymore. + * @see + * #isUserAllowedOn(edu.harvard.iq.dataverse.authorization.RoleAssignee, + * edu.harvard.iq.dataverse.engine.command.Command, + * edu.harvard.iq.dataverse.DvObject) */ public boolean isUserAllowedOn(RoleAssignee u, Class commandClass, DvObject dvo) { Map> required = CH.permissionsRequired(commandClass); @@ -369,25 +454,26 @@ public RequestPermissionQuery on(DvObject d) { } return requestOn(dvRequestService.getDataverseRequest(), d); } - - public RequestPermissionQuery requestOn( DataverseRequest req, DvObject dvo ) { + + public RequestPermissionQuery requestOn(DataverseRequest req, DvObject dvo) { if (dvo.getId() == null) { throw new IllegalArgumentException("Cannot query permissions on a DvObject with a null id."); } return new RequestPermissionQuery(dvo, req); } - - public RequestPermissionQuery request( DataverseRequest req ) { + + public RequestPermissionQuery request(DataverseRequest req) { return new RequestPermissionQuery(null, req); } - + /** * Go from (User, Permission) to a list of Dataverse objects that the user * has the permission on. * * @param user * @param permission - * @return The list of dataverses {@code user} has permission {@code permission} on. + * @return The list of dataverses {@code user} has permission + * {@code permission} on. 
*/ public List getDataversesUserHasPermissionOn(AuthenticatedUser user, Permission permission) { Set groups = groupService.groupsFor(user); @@ -409,20 +495,20 @@ public List getDataversesUserHasPermissionOn(AuthenticatedUser user, } return dataversesUserHasPermissionOn; } - + public List getUsersWithPermissionOn(Permission permission, DvObject dvo) { List usersHasPermissionOn = new LinkedList<>(); Set ras = roleService.rolesAssignments(dvo); for (RoleAssignment ra : ras) { if (ra.getRole().permissions().contains(permission)) { RoleAssignee raee = roleAssigneeService.getRoleAssignee(ra.getAssigneeIdentifier()); - usersHasPermissionOn.addAll(roleAssigneeService.getExplicitUsers(raee)); + usersHasPermissionOn.addAll(roleAssigneeService.getExplicitUsers(raee)); } } - + return usersHasPermissionOn; } - + public Map getDistinctUsersWithPermissionOn(Permission permission, DvObject dvo) { List users = getUsersWithPermissionOn(permission, dvo); @@ -432,8 +518,8 @@ public Map getDistinctUsersWithPermissionOn(Permissio }); return distinctUsers; - } - + } + public List getDvObjectsUserHasRoleOn(User user) { return getDvObjectIdsUserHasRoleOn(user, null, null, false); } @@ -446,8 +532,7 @@ public List getDvObjectIdsUserHasRoleOn(User user, List rol Method takes in a user and optional list of roles and dvobject type queries the role assigment table filtering by optional roles and dv returns dvobject ids - */ - + */ private String getRolesClause(List roles) { StringBuilder roleStringBld = new StringBuilder(); if (roles != null && !roles.isEmpty()) { @@ -480,12 +565,10 @@ private String getTypesClause(List types) { } return typeStringBld.toString(); } - - - public List getDvObjectIdsUserHasRoleOn(User user, List roles, List types, boolean indirect) { + public List getDvObjectIdsUserHasRoleOn(User user, List roles, List types, boolean indirect) { - String roleString = getRolesClause (roles); + String roleString = getRolesClause(roles); String typeString = getTypesClause(types); Query nativeQuery = em.createNativeQuery("SELECT id FROM dvobject WHERE " @@ -506,7 +589,7 @@ public List getDvObjectIdsUserHasRoleOn(User user, List rol } } } - + // Get child datasets and files if (indirect) { indirectParentIds += ") "; @@ -540,7 +623,7 @@ public List getDvObjectIdsUserHasRoleOn(User user, List rol } return dataversesUserHasPermissionOn; } - + public void checkEditDatasetLock(Dataset dataset, DataverseRequest dataverseRequest, Command command) throws IllegalCommandException { if (dataset.isLocked()) { if (dataset.isLockedFor(DatasetLock.Reason.InReview)) { @@ -565,7 +648,7 @@ public void checkEditDatasetLock(Dataset dataset, DataverseRequest dataverseRequ } } } - + public void checkDownloadFileLock(Dataset dataset, DataverseRequest dataverseRequest, Command command) throws IllegalCommandException { if (dataset.isLocked()) { if (dataset.isLockedFor(DatasetLock.Reason.InReview)) { @@ -577,7 +660,7 @@ public void checkDownloadFileLock(Dataset dataset, DataverseRequest dataverseReq if (dataset.isLockedFor(DatasetLock.Reason.Ingest)) { throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.downloadNotAllowed"), command); } - if (dataset.isLockedFor(DatasetLock.Reason.pidRegister)) { + if (dataset.isLockedFor(DatasetLock.Reason.pidRegister)) { throw new IllegalCommandException(BundleUtil.getStringFromBundle("dataset.message.locked.downloadNotAllowed"), command); } // TODO: Do we need to check for "Workflow"? Should the message be more specific? 
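The hasPermissionsFor() methods introduced in this commit avoid materializing a user's full permission set when only a yes/no answer is needed: they subtract each role's grants from the required permissions and stop as soon as nothing is missing. A sketch of that short-circuit idea with stand-in types:

import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;

public class ShortCircuitPermissionSketch {
    enum Permission { ViewUnpublishedDataset, EditDataset, DownloadFile }

    static boolean hasAll(List<Set<Permission>> roleGrants, Set<Permission> required) {
        Set<Permission> missing = EnumSet.copyOf(required);
        for (Set<Permission> grant : roleGrants) {
            missing.removeAll(grant); // subtract what this role grants
            if (missing.isEmpty()) {
                return true; // short-circuit: no need to inspect further roles
            }
        }
        return false;
    }

    public static void main(String[] args) {
        List<Set<Permission>> grants = Arrays.asList(
                EnumSet.of(Permission.ViewUnpublishedDataset),
                EnumSet.of(Permission.EditDataset));
        System.out.println(hasAll(grants, EnumSet.of(Permission.ViewUnpublishedDataset))); // true
        System.out.println(hasAll(grants, EnumSet.of(Permission.DownloadFile)));           // false
    }
}

Note removeAll(), which subtracts the grant from the required set; retainAll() would instead keep only the intersection. That distinction is exactly what a later commit in this series ("removeall and retain all are kind of different") corrects.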
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java index 5c465748795..903d3aaf4d1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java @@ -4,6 +4,8 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.authorization.Permission; +import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -33,16 +35,32 @@ public ListDataverseContentCommand(DataverseRequest aRequest, Dataverse anAffect @Override public List execute(CommandContext ctxt) throws CommandException { LinkedList result = new LinkedList<>(); - - for (Dataset ds : ctxt.datasets().findByOwnerId(dvToList.getId())) { - if (ds.isReleased() || ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { - result.add(ds); + User user = getRequest().getUser(); + if (user.isSuperuser()) { + result.addAll(ctxt.datasets().findByOwnerId(dvToList.getId())); + result.addAll(ctxt.dataverses().findByOwnerId(dvToList.getId())); + } else if (user.isAuthenticated()) { + AuthenticatedUser au = (AuthenticatedUser) user; + for (Dataset ds : ctxt.datasets().findByOwnerId(dvToList.getId())) { + if (ds.isReleased() || ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { + result.add(ds); + } } - } - - for (Dataverse dv : ctxt.dataverses().findByOwnerId(dvToList.getId())) { - if (dv.isReleased() || ctxt.permissions().requestOn(getRequest(), dv).has(Permission.ViewUnpublishedDataverse)) { - result.add(dv); + for (Dataverse dv : ctxt.dataverses().findByOwnerId(dvToList.getId())) { + if (dv.isReleased() || ctxt.permissions().requestOn(getRequest(), dv).has(Permission.ViewUnpublishedDataverse)) { + result.add(dv); + } + } + } else { + for (Dataset ds : ctxt.datasets().findByOwnerId(dvToList.getId())) { + if (ds.isReleased()) { + result.add(ds); + } + } + for (Dataverse dv : ctxt.dataverses().findByOwnerId(dvToList.getId())) { + if (dv.isReleased()) { + result.add(dv); + } } } From 5c2725164f9ca57c626947277c88a3d4003249fe Mon Sep 17 00:00:00 2001 From: oscardssmith Date: Fri, 20 Jul 2018 15:02:09 -0400 Subject: [PATCH 016/701] removed duplicate check --- .../iq/dataverse/PermissionServiceBean.java | 21 ++++++------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 9432cea45ab..24cf1072dff 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -239,12 +239,16 @@ public Set permissionsFor(DataverseRequest req, DvObject dvo) { } public boolean hasPermissionsFor(DataverseRequest req, DvObject dvo, Permission p) { - if (!req.getUser().isAuthenticated() && PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY.contains(p)) { + User user = req.getUser(); + if (user.isSuperuser()){ + return true; + } + else if (!user.isAuthenticated() && 
PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY.contains(p)) { return false; } // Start with permissions specifically given to the user - if (hasPermissionsForSingleRoleAssignee(req.getUser(), dvo, p)) { + if (hasPermissionsForSingleRoleAssignee(user, dvo, p)) { return true; } @@ -259,12 +263,6 @@ public boolean hasPermissionsFor(DataverseRequest req, DvObject dvo, Permission } private boolean hasPermissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, Permission p) { - // super user check - // for 4.0, we are allowing superusers all permissions - // for secure data, we may need to restrict some of the permissions - if (ra instanceof AuthenticatedUser && ((AuthenticatedUser) ra).isSuperuser()) { - return true; - } // File special case. if (d instanceof DataFile && p.equals(Permission.DownloadFile)) { // unrestricted files that are part of a release dataset @@ -288,13 +286,6 @@ private boolean hasPermissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, return true; } } - - // Recurse up the group containment hierarchy. - for (Group grp : groupService.groupsFor(ra, d)) { - if (hasPermissionsForSingleRoleAssignee(grp, d, p)) { - return true; - } - } return false; } From f7a9a4384c86a66c08682a3837cc8057cd487139 Mon Sep 17 00:00:00 2001 From: oscardssmith Date: Mon, 23 Jul 2018 11:27:16 -0400 Subject: [PATCH 017/701] updated PermissionServiceBean --- .../iq/dataverse/PermissionServiceBean.java | 111 ++++++++++-------- 1 file changed, 61 insertions(+), 50 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index 24cf1072dff..c1e4ac3d1c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -102,6 +102,13 @@ public Set get() { } public boolean has(Permission p) { + return hasPermissionsFor(request, subject, EnumSet.of(p)); + } + + public boolean has(Set p) { + if(p.isEmpty()){ + return true; + } return hasPermissionsFor(request, subject, p); } @@ -126,9 +133,8 @@ public boolean canIssue(Class aCmdClass) { logger.fine("IsUserAllowedOn: empty-true"); return true; } else { - Set grantedUserPermissions = permissionsFor(request, subject); Set requiredPermissionSet = required.get(""); - return grantedUserPermissions.containsAll(requiredPermissionSet); + return has(requiredPermissionSet); } } @@ -146,9 +152,8 @@ public boolean canIssue(Command aCmd) { logger.fine("IsUserAllowedOn: empty-true"); return true; } else { - Set grantedUserPermissions = permissionsFor(request, subject); Set requiredPermissionSet = required.get(""); - return grantedUserPermissions.containsAll(requiredPermissionSet); + return has(requiredPermissionSet); } } } @@ -194,7 +199,7 @@ public Set get() { } public boolean has(Permission p) { - return get().contains(p); + return hasPermissionsFor(user, subject, EnumSet.of(p)); } public boolean has(String pName) { @@ -213,47 +218,29 @@ public List assignmentsOn(DvObject d) { return em.createNamedQuery("RoleAssignment.listByDefinitionPointId", RoleAssignment.class) .setParameter("definitionPointId", d.getId()).getResultList(); } - - /** - * Finds all the permissions the {@link User} in {@code req} has over - * {@code dvo}, in the context of {@code req}. - * - * @param req - * @param dvo - * @return Permissions of {@code req.getUser()} over {@code dvo}. 
- */ - public Set permissionsFor(DataverseRequest req, DvObject dvo) { - // Start with permissions specifically given to the user - Set permissions = permissionsForSingleRoleAssignee(req.getUser(), dvo); - - // Add permissions gained from groups - for (Group g : groupService.groupsFor(req, dvo)) { - permissionsForSingleRoleAssignee(g, dvo, permissions); - } - - if (!req.getUser().isAuthenticated()) { - permissions.removeAll(PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY); - } - - return permissions; + + public boolean hasPermissionsFor(DataverseRequest req, DvObject dvo, Set p) { + return hasPermissionsFor(req.getUser(), dvo, p); } - public boolean hasPermissionsFor(DataverseRequest req, DvObject dvo, Permission p) { - User user = req.getUser(); - if (user.isSuperuser()){ - return true; - } - else if (!user.isAuthenticated() && PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY.contains(p)) { - return false; + public boolean hasPermissionsFor(RoleAssignee ra, DvObject dvo, Set p) { + if (ra instanceof User){ + User user = (User) ra; + if (user.isSuperuser()){ + return true; + } + else if (!user.isAuthenticated() && EnumSet.copyOf(p).retainAll(PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY)) { + return false; + } } // Start with permissions specifically given to the user - if (hasPermissionsForSingleRoleAssignee(user, dvo, p)) { + if (hasPermissionsForSingleRoleAssignee(ra, dvo, p)) { return true; } // Add permissions gained from groups - for (Group g : groupService.groupsFor(req, dvo)) { + for (Group g : groupService.groupsFor(ra, dvo)) { if (hasPermissionsForSingleRoleAssignee(g, dvo, p)) { return true; } @@ -261,10 +248,10 @@ else if (!user.isAuthenticated() && PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY.con return false; } - - private boolean hasPermissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, Permission p) { + + private boolean hasPermissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, Set p) { // File special case. - if (d instanceof DataFile && p.equals(Permission.DownloadFile)) { + if (d instanceof DataFile && p.contains(Permission.DownloadFile)) { // unrestricted files that are part of a release dataset // automatically get download permission for everybody: // -- L.A. 4.0 beta12 @@ -274,19 +261,48 @@ private boolean hasPermissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, if (!df.isRestricted()) { if (df.getOwner().getReleasedVersion() != null) { if (df.getOwner().getReleasedVersion().getFileMetadatas() != null) { - return true; + p.remove(Permission.DownloadFile); } } } } - + if(p.isEmpty()){ + return true; + } + // Direct assignments to ra on d for (RoleAssignment asmnt : assignmentsFor(ra, d)) { - if (asmnt.getRole().permissions().contains(p)) { + p.retainAll(asmnt.getRole().permissions()); + if (p.isEmpty()) { return true; } } - return false; + return p.isEmpty(); + } + + + /** + * Finds all the permissions the {@link User} in {@code req} has over + * {@code dvo}, in the context of {@code req}. + * + * @param req + * @param dvo + * @return Permissions of {@code req.getUser()} over {@code dvo}. 
+ */ + public Set permissionsFor(DataverseRequest req, DvObject dvo) { + // Start with permissions specifically given to the user + Set permissions = permissionsForSingleRoleAssignee(req.getUser(), dvo); + + // Add permissions gained from groups + for (Group g : groupService.groupsFor(req, dvo)) { + permissionsForSingleRoleAssignee(g, dvo, permissions); + } + + if (!req.getUser().isAuthenticated()) { + permissions.removeAll(PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY); + } + + return permissions; } /** @@ -363,10 +379,6 @@ private Set permissionsForSingleRoleAssignee(RoleAssignee ra, DvObje assignmentsFor(ra, d).forEach( asmnt -> retVal.addAll(asmnt.getRole().permissions()) ); - - // Recurse up the group containment hierarchy. - groupService.groupsFor(ra, d).forEach( - grp -> permissionsForSingleRoleAssignee(grp, d, retVal)); return retVal; } @@ -422,9 +434,8 @@ private boolean isUserAllowedOn(RoleAssignee u, Map> req logger.fine("IsUserAllowedOn: empty-true"); return true; } else { - Set grantedUserPermissions = permissionsFor(u, dvo); Set requiredPermissionSet = required.get(""); - return grantedUserPermissions.containsAll(requiredPermissionSet); + return hasPermissionsFor(u, dvo, requiredPermissionSet); } } From 7676f69e0879155368b312ceb25b2d0ae635f1bc Mon Sep 17 00:00:00 2001 From: oscardssmith Date: Mon, 23 Jul 2018 11:40:25 -0400 Subject: [PATCH 018/701] removeall and retain all are kind of different --- .../java/edu/harvard/iq/dataverse/PermissionServiceBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index c1e4ac3d1c6..f75e3274caa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -272,7 +272,7 @@ private boolean hasPermissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, // Direct assignments to ra on d for (RoleAssignment asmnt : assignmentsFor(ra, d)) { - p.retainAll(asmnt.getRole().permissions()); + p.removeAll(asmnt.getRole().permissions()); if (p.isEmpty()) { return true; } From e425c5ac77d54de287bcf76f8c5cac479309f2bd Mon Sep 17 00:00:00 2001 From: oscardssmith Date: Mon, 23 Jul 2018 14:38:08 -0400 Subject: [PATCH 019/701] temporary changes to add debugging --- .../impl/ListDataverseContentCommand.java | 30 +++++++++++-------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java index 903d3aaf4d1..8b895387c57 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java @@ -10,12 +10,12 @@ import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import edu.harvard.iq.dataverse.engine.command.exception.PermissionException; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.logging.Logger; /** * Lists the content of a dataverse - both datasets and dataverses. 
@@ -25,6 +25,8 @@ // no annotations here, since permissions are dynamically decided public class ListDataverseContentCommand extends AbstractCommand> { + private static final Logger logger = Logger.getLogger(ListDataverseContentCommand.class.getName()); + private final Dataverse dvToList; public ListDataverseContentCommand(DataverseRequest aRequest, Dataverse anAffectedDataverse) { @@ -41,27 +43,29 @@ public List execute(CommandContext ctxt) throws CommandException { result.addAll(ctxt.dataverses().findByOwnerId(dvToList.getId())); } else if (user.isAuthenticated()) { AuthenticatedUser au = (AuthenticatedUser) user; - for (Dataset ds : ctxt.datasets().findByOwnerId(dvToList.getId())) { + List datasets = ctxt.datasets().findByOwnerId(dvToList.getId()); + int i = 0; + long t0 = System.currentTimeMillis(); + for (Dataset ds : datasets) { + i++; + logger.info("On "+i+" out of " + datasets.size()); if (ds.isReleased() || ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { result.add(ds); } } - for (Dataverse dv : ctxt.dataverses().findByOwnerId(dvToList.getId())) { + logger.info(""+(System.currentTimeMillis()-t0)); + List dataverses = ctxt.dataverses().findByOwnerId(dvToList.getId()); + for (Dataverse dv : dataverses) { + i++; + logger.info("On "+i+" out of " + (datasets.size()+dataverses.size())); if (dv.isReleased() || ctxt.permissions().requestOn(getRequest(), dv).has(Permission.ViewUnpublishedDataverse)) { result.add(dv); } } + logger.info(""+(System.currentTimeMillis()-t0)); } else { - for (Dataset ds : ctxt.datasets().findByOwnerId(dvToList.getId())) { - if (ds.isReleased()) { - result.add(ds); - } - } - for (Dataverse dv : ctxt.dataverses().findByOwnerId(dvToList.getId())) { - if (dv.isReleased()) { - result.add(dv); - } - } + result.addAll(ctxt.datasets().findPublishedByOwnerId(dvToList.getId())); + result.addAll(ctxt.dataverses().findPublishedByOwnerId(dvToList.getId())); } return result; From 67d8296ea5bb91cc38d0517e9bdd7067796116fd Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 23 Jul 2018 15:13:44 -0400 Subject: [PATCH 020/701] #4610 Fix Lock issue and tests --- .../java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 3 +++ .../datacapturemodule/DataCaptureModuleUtil.java | 9 +++++---- .../datacapturemodule/DataCaptureModuleUtilTest.java | 4 ++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index 1dc29cd8ce7..ff618ec91ac 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -557,6 +557,9 @@ public String init() { } saveEnabled = true; + if (mode == FileEditMode.UPLOAD && workingVersion.getFileMetadatas().isEmpty() && settingsWrapper.isRsyncUpload()) { + setUpRsync(); + } if (mode == FileEditMode.UPLOAD) { JH.addMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.message.uploadFiles.label"), BundleUtil.getStringFromBundle("dataset.message.uploadFiles.message", Arrays.asList(systemConfig.getGuidesBaseUrl(), systemConfig.getGuidesVersion()))); diff --git a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java index d5a883a15a7..bc8994abd41 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtil.java
@@ -5,6 +5,7 @@
 import edu.harvard.iq.dataverse.Dataset;
 import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
 import edu.harvard.iq.dataverse.util.SystemConfig;
+import java.util.Arrays;
 import java.util.logging.Logger;
 import javax.json.Json;
 import javax.json.JsonObject;
@@ -15,11 +16,11 @@ public class DataCaptureModuleUtil {
     private static final Logger logger = Logger.getLogger(DataCaptureModuleUtil.class.getCanonicalName());
 
     public static boolean rsyncSupportEnabled(String uploadMethodsSettings) {
-        logger.fine("uploadMethodsSettings: " + uploadMethodsSettings);
-        if (uploadMethodsSettings != null && SystemConfig.FileUploadMethods.RSYNC.toString().equals(uploadMethodsSettings)) {
-            return true;
-        } else {
+        logger.fine("uploadMethodsSettings: " + uploadMethodsSettings);
+        if (uploadMethodsSettings == null) {
             return false;
+        } else {
+            return Arrays.asList(uploadMethodsSettings.toLowerCase().split("\\s*,\\s*")).contains(SystemConfig.FileUploadMethods.RSYNC.toString());
         }
     }
 
diff --git a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
index 08b020ec2ca..75252cc3ac2 100644
--- a/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/datacapturemodule/DataCaptureModuleUtilTest.java
@@ -26,9 +26,9 @@ public void testRsyncSupportEnabled() {
         System.out.println("rsyncSupportEnabled");
         assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled(null));
         assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("dcm/rsync+ssh"));
-        // We haven't finalized what the separator will be yet.
+        // Comma-separated lists of upload methods are supported.
         assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("NATIVE:dcm/rsync+ssh"));
-        assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("NATIVE,dcm/rsync+ssh"));
+        assertEquals(true, DataCaptureModuleUtil.rsyncSupportEnabled("NATIVE,dcm/rsync+ssh"));
         assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("NATIVE"));
         assertEquals(false, DataCaptureModuleUtil.rsyncSupportEnabled("junk"));
     }

From 2add42be64f5280d727f0f9552d7193852bf58d6 Mon Sep 17 00:00:00 2001
From: Stephen Kraffmiller
Date: Mon, 23 Jul 2018 16:38:24 -0400
Subject: [PATCH 021/701] #4610 Added download render logic to file page

---
 src/main/webapp/file.xhtml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml
index 5c6eda68571..478ad8c2955 100644
--- a/src/main/webapp/file.xhtml
+++ b/src/main/webapp/file.xhtml
@@ -171,7 +171,7 @@
+ and !settingsWrapper.rsyncDownload and !FilePage.fileMetadata.dataFile.filePackage}"> @@ -312,7 +312,7 @@ + rendered="#{settingsWrapper.rsyncDownload and FilePage.fileMetadata.dataFile.filePackage }"> @@ -374,7 +374,7 @@
-
+
From 4840c0840c19f878efc2ba747affdef2ff750128 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Mon, 23 Jul 2018 16:41:05 -0400 Subject: [PATCH 022/701] #4610 display download button on non-package files --- src/main/webapp/file.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 478ad8c2955..22f7cc35a4d 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -171,7 +171,7 @@
+ and !FilePage.fileMetadata.dataFile.filePackage}"> From 1b930bf3b9c6b8a06e786e730c8fc9b4e2e9fc0c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 24 Jul 2018 13:44:37 -0400 Subject: [PATCH 023/701] #4610 fix error handling for addFile apis --- src/main/java/Bundle.properties | 1 + .../edu/harvard/iq/dataverse/api/Datasets.java | 18 +++++++++++------- .../edu/harvard/iq/dataverse/api/Files.java | 4 ++++ .../datadeposit/MediaResourceManagerImpl.java | 10 ++++++---- 4 files changed, 22 insertions(+), 11 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index cd8af544785..ef7d5ac37f6 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1319,6 +1319,7 @@ file.fromDropbox.tip=Files can also be uploaded directly from Dropbox. file.fromRsync=Upload with rsync+ssh via Data Capture Module (DCM) file.fromHTTP=Upload with HTTP via your browser file.api.httpDisabled=File upload via HTTP is not available for this installation of Dataverse. +file.api.alreadyHasPackageFile=File upload via HTTP disabled since this dataset already contains a package file. file.replace.original=Original File file.editFiles=Edit Files file.editFilesSelected=Edit diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 3e70c78cc2a..32301a8717a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -1143,13 +1143,6 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, ResourceBundle.getBundle("Bundle").getString("file.addreplace.error.auth") ); } - //--------------------------------------- - // (1A) Make sure that the upload type is not rsync - // ------------------------------------- - - if (DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) { - return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " contains " + SystemConfig.FileUploadMethods.RSYNC + ". 
Please use rsync file upload."); - } // ------------------------------------- @@ -1165,6 +1158,17 @@ public Response addFileToDataset(@PathParam("id") String idSupplied, return wr.getResponse(); } + //------------------------------------ + // (2a) Make sure dataset does not have package file + // + // -------------------------------------- + + if (dataset.getEditVersion().isHasPackageFile()){ + return error(Response.Status.FORBIDDEN, + ResourceBundle.getBundle("Bundle").getString("file.api.alreadyHasPackageFile") + ); + } + // ------------------------------------- // (3) Get the file name and content type diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 2cfe2c679e7..40420f56a2d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -225,6 +225,10 @@ public Response replaceFileInDataset( try { DataFile dataFile = findDataFileOrDie(fileIdOrPersistentId); fileToReplaceId = dataFile.getId(); + + if (dataFile.isFilePackage()) { + return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")); + } } catch (WrappedResponse ex) { String error = BundleUtil.getStringFromBundle("file.addreplace.error.existing_file_to_replace_not_found_by_id", Arrays.asList(fileIdOrPersistentId)); // TODO: Some day, return ex.getResponse() instead. Also run FilesIT and updated expected status code and message. diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index 25f8d1eb1c0..1ac4d553a81 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -221,12 +221,14 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au } //--------------------------------------- - // Make sure that the upload type is not rsync + // Make sure that the upload type is not rsync - handled above for dual mode // ------------------------------------- - if (DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) { - throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, SettingsServiceBean.Key.UploadMethods + " contains " + SystemConfig.FileUploadMethods.RSYNC + ". Please use rsync file upload."); - } + if (dataset.getEditVersion().isHasPackageFile()) { + throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile")); + } + + // Right now we are only supporting UriRegistry.PACKAGE_SIMPLE_ZIP but // in the future maybe we'll support other formats? Rdata files? Stata files? /** From 5b3cc46ca7ec4ad232823ebe1bc666f42d7c797a Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Tue, 24 Jul 2018 14:17:20 -0400 Subject: [PATCH 024/701] Checked in scripts for counting queries in the PostgresQL logs. 
---
 scripts/database/querycount/README.txt | 56 ++++++++++++++++++++++++++
 scripts/database/querycount/count.pl   | 37 +++++++++++++++++
 scripts/database/querycount/parse.pl   | 56 ++++++++++++++++++++++++++
 3 files changed, 149 insertions(+)
 create mode 100644 scripts/database/querycount/README.txt
 create mode 100755 scripts/database/querycount/count.pl
 create mode 100755 scripts/database/querycount/parse.pl

diff --git a/scripts/database/querycount/README.txt b/scripts/database/querycount/README.txt
new file mode 100644
index 00000000000..be67fd8d6b6
--- /dev/null
+++ b/scripts/database/querycount/README.txt
@@ -0,0 +1,56 @@
+These scripts count queries *on the PostgreSQL server side*.
+
+To use them, enable verbose logging on the PostgreSQL server:
+
+Edit your postgresql.conf (for example,
+/var/lib/pgsql/9.3/data/postgresql.conf) and set "log_statement" to
+"all", like this:
+
+log_statement = 'all'          # none, ddl, mod, all
+
+Then restart PostgreSQL.
+
+Now you should have a fast-growing log file in your pg_log directory.
+For example, /var/lib/pgsql/9.3/data/pg_log/postgresql-Tue.log. (The
+name of the log file may vary on your system!)
+
+Copy the 2 scripts, count.pl and parse.pl, to the log directory.
+
+For example:
+
+cp scripts/database/querycount/*.pl /var/lib/pgsql/9.3/data/pg_log/
+
+Then run the count script as follows:
+
+cd /var/lib/pgsql/9.3/data/pg_log/
+./count.pl <log file>
+
+you will see something like this:
+
+# ./count.pl postgresql-Mon.log
+Current size: 3090929 bytes.
+Press any key when ready.
+
+Now go to your Dataverse and do whatever it is that you are
+testing. Then press any key to tell the script that it's done. It will
+then save the tail of the log file generated since you started the
+script, parse it, count the queries and output the total and the
+queries by type, sorted by frequency:
+
+Parsed and counted the queries. Total number:
+22593
+
+Queries, counted and sorted:
+
+   6248 SELECT ID, ASSIGNEEIDENTIFIER, PRIVATEURLTOKEN, DEFINITIONPOINT_ID, ROLE_ID FROM ROLEASSIGNMENT
+   6158 SELECT t1.ID, t1.DESCRIPTION, t1.DISPLAYNAME, t1.GROUPALIAS, t1.GROUPALIASINOWNER, t1.OWNER_ID FROM EXPLICITGROUP t0, explicitgroup_explicitgroup t2, EXPLICITGROUP t1
+   4934 SELECT t0.ID, t0.DESCRIPTION, t0.DISPLAYNAME, t0.GROUPALIAS, t0.GROUPALIASINOWNER, t0.OWNER_ID FROM EXPLICITGROUP t0, ExplicitGroup_CONTAINEDROLEASSIGNEES t1
+   2462 SELECT t1.ID, t1.DESCRIPTION, t1.DISPLAYNAME, t1.GROUPALIAS, t1.GROUPALIASINOWNER, t1.OWNER_ID FROM AUTHENTICATEDUSER t0, EXPLICITGROUP_AUTHENTICATEDUSER t2, EXPLICITGROUP t1
+    647 SELECT ID, BACKGROUNDCOLOR, LINKCOLOR, LINKURL, LOGO, LOGOALIGNMENT, LOGOBACKGROUNDCOLOR, LOGOFORMAT, TAGLINE, TEXTCOLOR, dataverse_id FROM DATAVERSETHEME
+
+   ... etc.
+
+(the output is also saved in the file "tail.counted" in the pg_log directory)
+
+
diff --git a/scripts/database/querycount/count.pl b/scripts/database/querycount/count.pl
new file mode 100755
index 00000000000..6ca178bd8e5
--- /dev/null
+++ b/scripts/database/querycount/count.pl
@@ -0,0 +1,37 @@
+#!/usr/bin/perl
+
+my $pglogfile = shift @ARGV;
+
+unless ( -f $pglogfile )
+{
+    die "usage: ./count.pl <log file>\n";
+}
+
+my $pglogfilesize = (stat($pglogfile))[7];
+print "Current size: ".$pglogfilesize." bytes.\n";
+print "Press any key when ready.\n";
+
+system "stty cbreak </dev/tty >/dev/tty 2>&1";
+my $key = getc(STDIN);
+system "stty -cbreak </dev/tty >/dev/tty 2>&1";
+print "\n";
+
+my $newsize = (stat($pglogfile))[7];
+my $diff = $newsize - $pglogfilesize;
+
+system "tail -c ".$diff." < ".$pglogfile." > tail";
+
+print "Increment: ".$diff."
bytes.\n"; + +system "./parse.pl < tail > tail.parsed"; + +system "cat tail.parsed | sed 's/ where.*//' | sed 's/ WHERE.*//' | sort | uniq -c | sort -nr -k 1,2 > tail.counted"; + + +print "Parsed and counted the queries. Total number:\n"; + +system "awk '{a+=\$1}END{print a}' < tail.counted"; + +print "\nQueries, counted and sorted: \n\n"; + +system "cat tail.counted"; diff --git a/scripts/database/querycount/parse.pl b/scripts/database/querycount/parse.pl new file mode 100755 index 00000000000..b0fb9ed303a --- /dev/null +++ b/scripts/database/querycount/parse.pl @@ -0,0 +1,56 @@ +#!/usr/bin/perl + +while (<>) +{ + chop; + if ( /execute : (select .*)$/i || /execute : (insert .*)$/i || /execute : (update .*)$/i) + { + $select_q = $1; + + if ($select_q =~/\$1/) + { + # saving the query, will substitute parameters + #print STDERR "saving query: " . $select_q . "\n"; + + } + else + { + print $select_q . "\n"; + $select_q = ""; + } + } + elsif (/^.*[A-Z][A-Z][A-Z] >DETAIL: parameters: (.*)$/i) + { +# print STDERR "EDT detail line encountered.\n"; + unless ($select_q) + { + die "EDT DETAIL encountered (" . $_ . ", no select_q\n"; + } + + $params = $1; + + @params_ = split (",", $params); + + for $p (@params_) + { + $p =~s/^ *//; + $p =~s/ *$//; + $p =~s/ *=/=/g; + $p =~s/= */=/g; + +# print STDERR $p . "\n"; + + ($name,$value) = split ("=", $p); + + $name =~s/^\$//g; + +# print STDERR "name: $name, value: $value\n"; + + + $select_q =~s/\$$name/$value/ge; + } + + print $select_q . "\n"; + $select_q = ""; + } +} From d1d39fc2e2e124e9f224a3469b0aa9b11e48272d Mon Sep 17 00:00:00 2001 From: oscardssmith Date: Tue, 24 Jul 2018 15:05:37 -0400 Subject: [PATCH 025/701] groups now use TimeoutChache --- .../iq/dataverse/PermissionServiceBean.java | 55 ++++-- .../groups/GroupServiceBean.java | 2 + .../impl/builtin/BuiltInGroupsProvider.java | 7 +- .../explicit/ExplicitGroupServiceBean.java | 19 ++- .../impl/ipaddress/IpGroupsServiceBean.java | 15 +- .../impl/shib/ShibGroupServiceBean.java | 66 ++++---- .../impl/ListDataverseContentCommand.java | 7 +- .../iq/dataverse/util/TimeoutCache.java | 158 ++++++++++++++++++ 8 files changed, 270 insertions(+), 59 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/util/TimeoutCache.java diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java index f75e3274caa..280fe175cd4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java @@ -104,9 +104,9 @@ public Set get() { public boolean has(Permission p) { return hasPermissionsFor(request, subject, EnumSet.of(p)); } - + public boolean has(Set p) { - if(p.isEmpty()){ + if (p.isEmpty()) { return true; } return hasPermissionsFor(request, subject, p); @@ -208,29 +208,55 @@ public boolean has(String pName) { } - public Collection userPermissions(User user, DvObject dvo){ + public Collection userPermissions(User user, DvObject dvo) { String powerfull_roles = "select id from dataverserole where (permissionbits&12)!=0"; List roles = em.createNativeQuery(powerfull_roles).getResultList(); String x = "select id from dataverserole where (permissionbits&12)!=0"; return null; } + public List assignmentsOn(DvObject d) { return em.createNamedQuery("RoleAssignment.listByDefinitionPointId", RoleAssignment.class) .setParameter("definitionPointId", d.getId()).getResultList(); } - + public boolean hasPermissionsFor(DataverseRequest req, 
DvObject dvo, Set p) { - return hasPermissionsFor(req.getUser(), dvo, p); + User user = req.getUser(); + if (user.isSuperuser()) { + return true; + } else if (!user.isAuthenticated()) { + Set p_copy = EnumSet.copyOf(p); + p_copy.retainAll(PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY); + if (!p_copy.isEmpty()) { + return false; + } + } + // Start with permissions specifically given to the user + if (hasPermissionsForSingleRoleAssignee(user, dvo, p)) { + return true; + } + + // Add permissions gained from groups + for (Group g : groupService.groupsFor(req, dvo)) { + if (hasPermissionsForSingleRoleAssignee(g, dvo, p)) { + return true; + } + } + + return false; } public boolean hasPermissionsFor(RoleAssignee ra, DvObject dvo, Set p) { - if (ra instanceof User){ + if (ra instanceof User) { User user = (User) ra; - if (user.isSuperuser()){ + if (user.isSuperuser()) { return true; - } - else if (!user.isAuthenticated() && EnumSet.copyOf(p).retainAll(PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY)) { - return false; + } else if (!user.isAuthenticated()) { + Set p_copy = EnumSet.copyOf(p); + p_copy.retainAll(PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY); + if (!p_copy.isEmpty()) { + return false; + } } } @@ -248,7 +274,7 @@ else if (!user.isAuthenticated() && EnumSet.copyOf(p).retainAll(PERMISSIONS_FOR_ return false; } - + private boolean hasPermissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, Set p) { // File special case. if (d instanceof DataFile && p.contains(Permission.DownloadFile)) { @@ -266,10 +292,10 @@ private boolean hasPermissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, } } } - if(p.isEmpty()){ + if (p.isEmpty()) { return true; } - + // Direct assignments to ra on d for (RoleAssignment asmnt : assignmentsFor(ra, d)) { p.removeAll(asmnt.getRole().permissions()); @@ -279,8 +305,7 @@ private boolean hasPermissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d, } return p.isEmpty(); } - - + /** * Finds all the permissions the {@link User} in {@code req} has over * {@code dvo}, in the context of {@code req}. 
diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java index 79f2e85613f..c3479709dc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/GroupServiceBean.java @@ -13,9 +13,11 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.shib.ShibGroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import edu.harvard.iq.dataverse.util.TimeoutCache; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/builtin/BuiltInGroupsProvider.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/builtin/BuiltInGroupsProvider.java index cb6da272dda..cd80384eb43 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/builtin/BuiltInGroupsProvider.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/builtin/BuiltInGroupsProvider.java @@ -46,9 +46,10 @@ public Set groupsFor(DataverseRequest req, DvObject dvo ) { @Override public Set groupsFor( RoleAssignee ra, DvObject dvo ) { if ( ra instanceof User) { - return (Set) ((ra instanceof AuthenticatedUser) - ? CollectionHelper.asSet(AllUsers.get(), AuthenticatedUsers.get()) - : Collections.singleton(AllUsers.get())); + if (ra instanceof AuthenticatedUser){ + return CollectionHelper.asSet(AllUsers.get(), AuthenticatedUsers.get()); + } + return Collections.singleton(AllUsers.get()); } else { return Collections.emptySet(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java index 3e49fca3b65..5d9258b396d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/explicit/ExplicitGroupServiceBean.java @@ -4,12 +4,14 @@ import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.util.TimeoutCache; import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.TreeSet; +import java.util.logging.Logger; import java.util.stream.Collectors; import javax.annotation.PostConstruct; import javax.ejb.EJB; @@ -29,6 +31,7 @@ @Stateless public class ExplicitGroupServiceBean { + private static final Logger logger = Logger.getLogger(ExplicitGroupServiceBean.class.getName()); @EJB private RoleAssigneeServiceBean roleAssigneeSvc; @@ -114,13 +117,22 @@ public Set findAvailableFor( DvObject d ) { return provider.updateProvider( egs ); } + + // One minute cache with max of 10 entries + TimeoutCache> groupCache = new TimeoutCache<>(10, 60*1000); /** * Finds all the explicit groups {@code ra} is a member of. * @param ra the role assignee whose membership list we seek * @return set of the explicit groups that contain {@code ra}. 
*/ public Set findGroups( RoleAssignee ra ) { - return findClosure(findDirectlyContainingGroups(ra)); + Set closure = groupCache.get(ra); + if (closure == null){ + logger.info("Explicit cache miss " + ra); + closure = findClosure(findDirectlyContainingGroups(ra)); + groupCache.put(ra, closure); + } + return closure; } /** @@ -156,6 +168,7 @@ public Set findDirectlyContainingGroups( RoleAssignee ra ) { } } + /** * Finds all the groups {@code ra} is a member of, in the context of {@code o}. * This includes both direct and indirect memberships. @@ -164,9 +177,7 @@ public Set findDirectlyContainingGroups( RoleAssignee ra ) { * @return All the groups in {@code o}'s context that {@code ra} is a member of. */ public Set findGroups( RoleAssignee ra, DvObject o ) { - Set directGroups = findDirectGroups(ra, o); - Set closure = findClosure(directGroups); - return closure.stream() + return findGroups(ra).stream() .filter( g -> g.owner.isAncestorOf(o) ) .collect( Collectors.toSet() ); } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java index d6cfb8b7f6e..89cb58b04c7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IPv4Address; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IPv6Address; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; +import edu.harvard.iq.dataverse.util.TimeoutCache; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -98,13 +99,20 @@ public List findAll() { return em.createNamedQuery("IpGroup.findAll", IpGroup.class).getResultList(); } + // One minute cache with max of 10 entries + TimeoutCache> groupCache = new TimeoutCache<>(10, 60*1000); public Set findAllIncludingIp( IpAddress ipa ) { + Set cached = groupCache.get(ipa); + if (cached != null){ + return cached; + } + logger.info("IP cache miss " + ipa); if ( ipa instanceof IPv4Address ) { IPv4Address ip4 = (IPv4Address) ipa; List groupList = em.createNamedQuery("IPv4Range.findGroupsContainingAddressAsLong", IpGroup.class) .setParameter("addressAsLong", ip4.toBigInteger()).getResultList(); return new HashSet<>(groupList); - + } else if ( ipa instanceof IPv6Address ) { IPv6Address ip6 = (IPv6Address) ipa; long[] ip6arr = ip6.toLongArray(); @@ -114,8 +122,9 @@ public Set findAllIncludingIp( IpAddress ipa ) { .setParameter("c", ip6arr[2]) .setParameter("d", ip6arr[3]) .getResultList(); - return new HashSet<>(groupList); - + cached = new HashSet<>(groupList); + return groupCache.put(ipa, cached); + } else { throw new IllegalArgumentException( "Unknown IpAddress type: " + ipa.getClass() + " (for IpAddress:" + ipa + ")" ); } diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java index e4876b5e046..eb7778b84c6 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java @@ -6,6 +6,7 @@ import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; 
import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; +import edu.harvard.iq.dataverse.util.TimeoutCache; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -39,7 +40,7 @@ public class ShibGroupServiceBean { GroupServiceBean groupService; @EJB ActionLogServiceBean actionLogSvc; - + /** * @return A ShibGroup or null. */ @@ -61,48 +62,57 @@ public List findAll() { public ShibGroup save(String name, String shibIdpAttribute, String shibIdp) { ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.GlobalGroups, "shibCreate"); - alr.setInfo( name + ": " + shibIdp + "/" + shibIdpAttribute ); - + alr.setInfo(name + ": " + shibIdp + "/" + shibIdpAttribute); + ShibGroup institutionalGroup = new ShibGroup(name, shibIdpAttribute, shibIdp, groupService.getShibGroupProvider()); em.persist(institutionalGroup); em.flush(); ShibGroup merged = em.merge(institutionalGroup); - + actionLogSvc.log(alr); return merged; } + // One minute cache with max of 10 entries + TimeoutCache> groupCache = new TimeoutCache<>(10, 60 * 1000); + public Set findFor(AuthenticatedUser authenticatedUser) { - Set groupsForUser = new HashSet<>(); - String shibIdp = authenticatedUser.getShibIdentityProvider(); - logger.fine("IdP for user " + authenticatedUser.getIdentifier() + " is " + shibIdp); - if (shibIdp != null) { - /** - * @todo Rather than a straight string equality match, we have a - * requirement to support regular expressions: - * https://docs.google.com/document/d/12Qru8Gjq4oDUiodI00oObHJog65S7QzFfFZuPU3n8aU/edit?usp=sharing - */ - TypedQuery typedQuery = em.createQuery("SELECT OBJECT(o) FROM ShibGroup as o WHERE o.pattern =:shibIdP", ShibGroup.class); - typedQuery.setParameter("shibIdP", shibIdp); - List matches = typedQuery.getResultList(); - groupsForUser.addAll(matches); + Set groupsForUser = groupCache.get(authenticatedUser); + if (groupsForUser == null) { + String shibIdp = authenticatedUser.getShibIdentityProvider(); + logger.fine("IdP for user " + authenticatedUser.getIdentifier() + " is " + shibIdp); + if (shibIdp != null) { + groupsForUser = new HashSet<>(); + /** + * @todo Rather than a straight string equality match, we have a + * requirement to support regular expressions: + * https://docs.google.com/document/d/12Qru8Gjq4oDUiodI00oObHJog65S7QzFfFZuPU3n8aU/edit?usp=sharing + */ + TypedQuery typedQuery = em.createQuery("SELECT OBJECT(o) FROM ShibGroup as o WHERE o.pattern =:shibIdP", ShibGroup.class); + typedQuery.setParameter("shibIdP", shibIdp); + List matches = typedQuery.getResultList(); + groupsForUser.addAll(matches); + /** + * @todo In addition to supporting institution-wide Shibboleth + * groups (Harvard, UNC, etc.), allow arbitrary Shibboleth + * attributes to be matched (with a regex) such as "memberOf" + * etc. + */ + + groupCache.put(authenticatedUser, groupsForUser); + } } - /** - * @todo In addition to supporting institution-wide Shibboleth groups - * (Harvard, UNC, etc.), allow arbitrary Shibboleth attributes to be - * matched (with a regex) such as "memberOf" etc. 
- */ return groupsForUser; } public boolean delete(ShibGroup doomed) throws Exception { ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.GlobalGroups, "shibDelete"); - alr.setInfo( doomed.getName() + ":" + doomed.getIdentifier() ); - + alr.setInfo(doomed.getName() + ":" + doomed.getIdentifier()); + List assignments = roleAssigneeSvc.getAssignmentsFor(doomed.getIdentifier()); if (assignments.isEmpty()) { em.remove(doomed); - actionLogSvc.log( alr ); + actionLogSvc.log(alr); return true; } else { /** @@ -114,9 +124,9 @@ public boolean delete(ShibGroup doomed) throws Exception { } String message = "Could not delete Shibboleth group id " + doomed.getId() + " due to existing role assignments: " + assignmentIds; logger.info(message); - actionLogSvc.log( alr.setActionResult(ActionLogRecord.Result.BadRequest) - .setInfo( alr.getInfo() + "// " + message ) ); - + actionLogSvc.log(alr.setActionResult(ActionLogRecord.Result.BadRequest) + .setInfo(alr.getInfo() + "// " + message)); + throw new Exception(message); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java index 8b895387c57..0f719155135 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java @@ -41,8 +41,7 @@ public List execute(CommandContext ctxt) throws CommandException { if (user.isSuperuser()) { result.addAll(ctxt.datasets().findByOwnerId(dvToList.getId())); result.addAll(ctxt.dataverses().findByOwnerId(dvToList.getId())); - } else if (user.isAuthenticated()) { - AuthenticatedUser au = (AuthenticatedUser) user; + } else { List datasets = ctxt.datasets().findByOwnerId(dvToList.getId()); int i = 0; long t0 = System.currentTimeMillis(); @@ -63,11 +62,7 @@ public List execute(CommandContext ctxt) throws CommandException { } } logger.info(""+(System.currentTimeMillis()-t0)); - } else { - result.addAll(ctxt.datasets().findPublishedByOwnerId(dvToList.getId())); - result.addAll(ctxt.dataverses().findPublishedByOwnerId(dvToList.getId())); } - return result; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/TimeoutCache.java b/src/main/java/edu/harvard/iq/dataverse/util/TimeoutCache.java new file mode 100644 index 00000000000..2f2aba73ba4 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/util/TimeoutCache.java @@ -0,0 +1,158 @@ +package edu.harvard.iq.dataverse.util; + +import java.util.AbstractMap; +import java.util.LinkedHashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; + +public class TimeoutCache implements Map { + + private class TimeNode { + + public V val; + public long time; + + public TimeNode(V val, long time) { + this.val = val; + this.time = time; + } + } + + private final int capacity; + private final LinkedHashMap> map; + private final int timeout; + + /* timout in milliseconds + */ + public TimeoutCache(int capacity, int timeout) { + this.capacity = capacity; + this.timeout = timeout; + + // (capacity * 4 + 2) /3 accounts for load factor of 3/4 + this.map = new LinkedHashMap>((capacity * 4 + 2) / 3, 0.75f, false) { + protected boolean removeEldestEntry(){ + return size() > capacity; + } + }; + } + + @Override + public V get(Object key) { + K k = (K) key; + TimeNode value = this.map.get(k); + long now = System.currentTimeMillis(); + if (value == 
null) { + return null; + } else if ((now - value.time) > timeout) { + map.remove(k); + return null; + } + return value.val; + } + + @Override + public V put(K key, V val) { + if (map.size() == this.capacity) { + cull(); + } + TimeNode ret = map.put(key, new TimeNode<>(val, System.currentTimeMillis())); + if (ret == null){ + return null; + } + return ret.val; + } + + /* becomes out of date as entries time out + */ + @Override + public int size() { + cull(); + return map.size(); + } + + /* becomes out of date as entries time out + */ + @Override + public boolean isEmpty() { + cull(); + return map.isEmpty(); + } + + /* becomes out of date as entries time out + */ + @Override + public boolean containsKey(Object key) { + K k = (K) key; + return get(k) != null; + } + + /* becomes out of date as entries time out + */ + @Override + public boolean containsValue(Object val) { + V v = (V) val; + return values().contains(v); + } + + @Override + public V remove(Object o) { + return map.remove(o).val; + } + + @Override + public void putAll(Map map) { + for (Entry pair : map.entrySet()) { + put(pair.getKey(), pair.getValue()); + } + } + + @Override + public void clear() { + map.clear(); + } + + @Override + public Set keySet() { + cull(); + return map.keySet(); + } + + @Override + public Set values() { + cull(); + Set values = new HashSet<>(capacity); + for (TimeNode val : map.values()) { + values.add(val.val); + } + return values; + } + + @Override + public Set> entrySet() { + cull(); + Set> entries = new HashSet<>(capacity); + for (Entry> entry : map.entrySet()) { + entries.add(new AbstractMap.SimpleEntry<>(entry.getKey(), entry.getValue().val)); + } + return entries; + } + + /* purges old entries from the cache + */ + private void cull() { + long now = System.currentTimeMillis(); + Iterator> it = map.values().iterator(); + while (it.hasNext()) { + TimeNode v = it.next(); + long dt = now - v.time; + // dt shouldn't be less than 0, but time can be weird. 
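+            // Note: entries are iterated in insertion order (accessOrder=false above),
+            // so timestamps are normally non-decreasing and we can stop culling at the
+            // first entry that has not timed out yet. One caveat: put() on an existing
+            // key refreshes its timestamp without moving the entry, so a stale entry
+            // inserted after it can survive cull() until get() evicts it on access.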
+ if (dt < 0 || dt > timeout) { + it.remove(); + } else { + break; + } + } + } +} From 8d2c6406972863493444f53e20e63cae4abde500 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Tue, 24 Jul 2018 16:39:36 -0400 Subject: [PATCH 026/701] #4610 fix failing test --- src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 04c5a140f2b..d9a9fd7d6cb 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -59,7 +59,8 @@ public static void setUpClass() { Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport); removeExcludeEmail.then().assertThat() .statusCode(200); - + /* With Dual mode, we can no longer mess with upload methods since native is now required for anything to work + Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); removeDcmUrl.then().assertThat() .statusCode(200); @@ -67,6 +68,7 @@ public static void setUpClass() { Response removeUploadMethods = UtilIT.deleteSetting(SettingsServiceBean.Key.UploadMethods); removeUploadMethods.then().assertThat() .statusCode(200); + */ } @AfterClass @@ -79,7 +81,7 @@ public static void afterClass() { Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport); removeExcludeEmail.then().assertThat() .statusCode(200); - + /* See above Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl); removeDcmUrl.then().assertThat() .statusCode(200); @@ -87,6 +89,7 @@ public static void afterClass() { Response removeUploadMethods = UtilIT.deleteSetting(SettingsServiceBean.Key.UploadMethods); removeUploadMethods.then().assertThat() .statusCode(200); + */ } @Test From 1debd4edb40415d08056006c9784f6d39ef53ad2 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 25 Jul 2018 09:21:33 -0400 Subject: [PATCH 027/701] #4610 limit page refresh on file upload --- src/main/webapp/editFilesFragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 639faf11c6d..bd8cc263540 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -209,7 +209,7 @@ disabled="#{!(datasetPage || EditDatafilesPage.showFileUploadComponent())}" fileUploadListener="#{EditDatafilesPage.handleFileUpload}" process="filesTable" - update="@form, :datasetForm, @([id$=filesButtons])" + update=":datasetForm:filesTable, @([id$=filesButtons])" label="#{bundle['file.selectToAddBtn']}" oncomplete="javascript:dataset_fileupload_rebind();uploadFinished(PF('fileUploadWidget'));uploadFinished();" onstart="javascript:uploadWidgetDropRemoveMsg();uploadStarted();" From 68f95b0c2a51af4e83e843a3f70dfc7c4a236b74 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 25 Jul 2018 09:46:42 -0400 Subject: [PATCH 028/701] #4610 fix deprecated autoUpdate tag --- src/main/webapp/dataverse.xhtml | 3 ++- src/main/webapp/metadataFragment.xhtml | 6 ++++-- src/main/webapp/themeAndWidgetsFragment.xhtml | 3 ++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/main/webapp/dataverse.xhtml b/src/main/webapp/dataverse.xhtml index d501099b561..3b86148fa3e 100644 --- a/src/main/webapp/dataverse.xhtml +++ 
b/src/main/webapp/dataverse.xhtml @@ -131,7 +131,8 @@
- + + - + +
- + +
diff --git a/src/main/webapp/themeAndWidgetsFragment.xhtml b/src/main/webapp/themeAndWidgetsFragment.xhtml index 315ae30376e..b1931ae7e5a 100644 --- a/src/main/webapp/themeAndWidgetsFragment.xhtml +++ b/src/main/webapp/themeAndWidgetsFragment.xhtml @@ -7,7 +7,8 @@ - + +
From b35fb5fe9899c659f493447df9662f2638a78739 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 25 Jul 2018 11:07:00 -0400 Subject: [PATCH 029/701] #4610 make dropbox subordinate to native --- .../edu/harvard/iq/dataverse/SettingsWrapper.java | 4 ---- .../harvard/iq/dataverse/util/SystemConfig.java | 14 ++------------ src/main/webapp/editFilesFragment.xhtml | 12 +++--------- 3 files changed, 5 insertions(+), 25 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java index 19177d81216..56347eb5430 100644 --- a/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java +++ b/src/main/java/edu/harvard/iq/dataverse/SettingsWrapper.java @@ -154,10 +154,6 @@ public boolean isHTTPUpload(){ return systemConfig.isHTTPUpload(); } - public boolean isDropBoxUpload(){ - return systemConfig.isDropBoxUpload(); - } - public boolean isDataFilePIDSequentialDependent(){ return systemConfig.isDataFilePIDSequentialDependent(); } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java index 6bc67191d51..5e737f16943 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SystemConfig.java @@ -839,13 +839,8 @@ public enum FileUploadMethods { * Traditional Dataverse file handling, which tends to involve users * uploading and downloading files using a browser or APIs. */ - NATIVE("native/http"), - /** - * Traditional Dataverse file handling, which tends to involve users - * uploading and downloading files using a browser or APIs. - */ - - DROPBOX("native/dropbox"); + NATIVE("native/http"); + private final String text; @@ -980,11 +975,6 @@ public boolean isHTTPUpload(){ return getUploadMethodAvailable(SystemConfig.FileUploadMethods.NATIVE.toString()); } - public boolean isDropBoxUpload(){ - return getUploadMethodAvailable(SystemConfig.FileUploadMethods.DROPBOX.toString()); - } - - public boolean isRsyncDownload() { String downloadMethods = settingsService.getValueForKey(SettingsServiceBean.Key.DownloadMethods); diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index bd8cc263540..008b0997c4e 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -224,8 +224,8 @@
- -
+ +
@@ -314,15 +314,10 @@ - -
- -
-
- +
@@ -332,7 +327,6 @@

#{bundle['file.createUploadDisabled']}

-
From 3b033d683804519bb8730cfbccd90631da86a44e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 25 Jul 2018 11:14:23 -0400 Subject: [PATCH 030/701] update docs to indicate that Dropbox only is not a valid mode #4610 --- doc/sphinx-guides/source/installation/config.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index d772e3d1cc9..f4636c5f891 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1255,12 +1255,11 @@ The URL for your Repository Storage Abstraction Layer (RSAL) installation. This This setting controls which upload methods are available to users of your installation of Dataverse. The following upload methods are available: - ``native/http``: Corresponds to "Upload with HTTP via your browser" and APIs that use HTTP (SWORD and native). -- ``native/dropbox``: Corresponds to "Upload with Dropbox". Note that the JVM option ``dataverse.dropbox.key`` is also required. - ``dcm/rsync+ssh``: Corresponds to "Upload with rsync+ssh via Data Capture Module (DCM)". A lot of setup is required, as explained in the :doc:`/developers/big-data-support` section of the Dev Guide. Out of the box only ``native/http`` is enabled and will work without further configuration. To add multiple upload method, separate them using a comma like this: -``curl -X PUT -d 'native/http,native/dropbox,dcm/rsync+ssh' http://localhost:8080/api/admin/settings/:UploadMethods`` +``curl -X PUT -d 'native/http,dcm/rsync+ssh' http://localhost:8080/api/admin/settings/:UploadMethods`` You'll always want at least one upload method, so the easiest way to remove one of them is to simply ``PUT`` just the one you want, like this: From 7452e66ae92b6d9f3af64ef1a49576221b87775e Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 25 Jul 2018 15:55:30 -0400 Subject: [PATCH 031/701] update createUploadDisabled text for rsync #4610 --- src/main/java/Bundle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index ef7d5ac37f6..6514ac72a47 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1313,7 +1313,7 @@ file.selectToAddBtn=Select Files to Add file.selectToAdd.tipLimit=File upload limit is {0} per file. file.selectToAdd.tipMoreInformation=For more information about supported file formats, please refer to the User Guide. file.selectToAdd.dragdropMsg=Drag and drop files here. -file.createUploadDisabled=Once you have saved your dataset, you can upload your data using the "Upload Files" button on the dataset page. For more information about supported file formats, please refer to the User Guide. +file.createUploadDisabled=rsync allows you to run a script that transfers your files into or out of Dataverse via SSH. It is useful for extremely large files, or packages containing a large number of files. Once you have saved this dataset, you can upload your data using rsync via the "Upload Files" button on the dataset page. file.fromDropbox=Upload from Dropbox file.fromDropbox.tip=Files can also be uploaded directly from Dropbox. 
file.fromRsync=Upload with rsync+ssh via Data Capture Module (DCM) From a867f5b7a8c1499cedf27ffb481b71b6d462e626 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 25 Jul 2018 16:08:03 -0400 Subject: [PATCH 032/701] indent rsync help block to match native/http #4610 --- src/main/webapp/editFilesFragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 008b0997c4e..060e00aa1c1 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -322,7 +322,7 @@
-
+

#{bundle['file.createUploadDisabled']}

From f1b1d6cb283bf73b5b0246641ee3d0fbad8ed9f0 Mon Sep 17 00:00:00 2001 From: oscardssmith Date: Thu, 26 Jul 2018 10:17:19 -0400 Subject: [PATCH 033/701] broken --- .../harvard/iq/dataverse/api/Dataverses.java | 751 +++++++++--------- .../impl/ipaddress/IpGroupsServiceBean.java | 3 +- .../impl/ListDataverseContentCommand.java | 76 -- .../iq/dataverse/mydata/DataRetrieverAPI.java | 60 +- .../iq/dataverse/mydata/MyDataFinder.java | 57 +- 5 files changed, 388 insertions(+), 559 deletions(-) delete mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index fcfd84224af..a05f4f9d00c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -37,7 +37,6 @@ import edu.harvard.iq.dataverse.engine.command.impl.GetExplicitGroupCommand; import edu.harvard.iq.dataverse.engine.command.impl.ImportDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.LinkDataverseCommand; -import edu.harvard.iq.dataverse.engine.command.impl.ListDataverseContentCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListExplicitGroupsCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListFacetsCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListMetadataBlocksCommand; @@ -52,6 +51,9 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateExplicitGroupCommand; +import edu.harvard.iq.dataverse.search.SearchException; +import edu.harvard.iq.dataverse.search.SolrQueryResponse; +import edu.harvard.iq.dataverse.search.SolrSearchResult; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.StringUtil; import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; @@ -92,51 +94,53 @@ import javax.ws.rs.core.Response.Status; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; +import java.util.Arrays; import java.util.Date; import java.util.Optional; /** * A REST API for dataverses. 
+ * * @author michael */ @Stateless @Path("dataverses") public class Dataverses extends AbstractApiBean { - + private static final Logger logger = Logger.getLogger(Dataverses.class.getCanonicalName()); @EJB ExplicitGroupServiceBean explicitGroupSvc; // @EJB // SystemConfig systemConfig; - - @POST - public Response addRoot( String body ) { + + @POST + public Response addRoot(String body) { logger.info("Creating root dataverse"); - return addDataverse( body, ""); - } - - @POST - @Path("{identifier}") - public Response addDataverse( String body, @PathParam("identifier") String parentIdtf ) { - + return addDataverse(body, ""); + } + + @POST + @Path("{identifier}") + public Response addDataverse(String body, @PathParam("identifier") String parentIdtf) { + Dataverse d; JsonObject dvJson; - try ( StringReader rdr = new StringReader(body) ) { + try (StringReader rdr = new StringReader(body)) { dvJson = Json.createReader(rdr).readObject(); d = jsonParser().parseDataverse(dvJson); - } catch ( JsonParsingException jpe ) { + } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); - return error( Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage() ); + return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage()); } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); - return error( Response.Status.BAD_REQUEST, - "Error parsing the POSTed json into a dataverse: " + ex.getMessage() ); + return error(Response.Status.BAD_REQUEST, + "Error parsing the POSTed json into a dataverse: " + ex.getMessage()); } - - try { - if ( ! parentIdtf.isEmpty() ) { - Dataverse owner = findDataverseOrDie( parentIdtf ); + + try { + if (!parentIdtf.isEmpty()) { + Dataverse owner = findDataverseOrDie(parentIdtf); d.setOwner(owner); } @@ -146,33 +150,33 @@ public Response addDataverse( String body, @PathParam("identifier") String paren } AuthenticatedUser u = findAuthenticatedUserOrDie(); - d = execCommand( new CreateDataverseCommand(d, createDataverseRequest(u), null, null) ); - return created( "/dataverses/"+d.getAlias(), json(d) ); - } catch ( WrappedResponse ww ) { - Throwable cause = ww.getCause(); - StringBuilder sb = new StringBuilder(); - if (cause == null) { - return ww.refineResponse("cause was null!"); - } - while (cause.getCause() != null) { - cause = cause.getCause(); - if (cause instanceof ConstraintViolationException) { - ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause; - for (ConstraintViolation violation : constraintViolationException.getConstraintViolations()) { - sb.append(" Invalid value: <<<").append(violation.getInvalidValue()).append(">>> for ") - .append(violation.getPropertyPath()).append(" at ") - .append(violation.getLeafBean()).append(" - ") - .append(violation.getMessage()); - } - } - } - String error = sb.toString(); - if (!error.isEmpty()) { - logger.log(Level.INFO, error); - return ww.refineResponse(error); + d = execCommand(new CreateDataverseCommand(d, createDataverseRequest(u), null, null)); + return created("/dataverses/" + d.getAlias(), json(d)); + } catch (WrappedResponse ww) { + Throwable cause = ww.getCause(); + StringBuilder sb = new StringBuilder(); + if (cause == null) { + return ww.refineResponse("cause was null!"); + } + while (cause.getCause() != null) { + cause = cause.getCause(); + if (cause instanceof ConstraintViolationException) { + ConstraintViolationException constraintViolationException = 
(ConstraintViolationException) cause; + for (ConstraintViolation violation : constraintViolationException.getConstraintViolations()) { + sb.append(" Invalid value: <<<").append(violation.getInvalidValue()).append(">>> for ") + .append(violation.getPropertyPath()).append(" at ") + .append(violation.getLeafBean()).append(" - ") + .append(violation.getMessage()); } + } + } + String error = sb.toString(); + if (!error.isEmpty()) { + logger.log(Level.INFO, error); + return ww.refineResponse(error); + } return ww.getResponse(); - + } catch (EJBException ex) { Throwable cause = ex; StringBuilder sb = new StringBuilder(); @@ -190,355 +194,364 @@ public Response addDataverse( String body, @PathParam("identifier") String paren } } logger.log(Level.SEVERE, sb.toString()); - return error( Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + sb.toString() ); - } catch ( Exception ex ) { - logger.log(Level.SEVERE, "Error creating dataverse", ex); - return error( Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + ex.getMessage() ); - + return error(Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + sb.toString()); + } catch (Exception ex) { + logger.log(Level.SEVERE, "Error creating dataverse", ex); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + ex.getMessage()); + } - } - + } + @POST @Path("{identifier}/datasets") - public Response createDataset( String jsonBody, @PathParam("identifier") String parentIdtf ) { + public Response createDataset(String jsonBody, @PathParam("identifier") String parentIdtf) { try { User u = findUserOrDie(); Dataverse owner = findDataverseOrDie(parentIdtf); Dataset ds = parseDataset(jsonBody); ds.setOwner(owner); - - if ( ds.getVersions().isEmpty() ) { + + if (ds.getVersions().isEmpty()) { return badRequest("Please provide initial version in the dataset json"); } - + // clean possible version metadata - DatasetVersion version = ds.getVersions().get(0); + DatasetVersion version = ds.getVersions().get(0); version.setMinorVersionNumber(null); version.setVersionNumber(null); version.setVersionState(DatasetVersion.VersionState.DRAFT); - + ds.setAuthority(null); ds.setIdentifier(null); ds.setProtocol(null); ds.setGlobalIdCreateTime(null); - + Dataset managedDs = execCommand(new CreateNewDatasetCommand(ds, createDataverseRequest(u))); return created("/datasets/" + managedDs.getId(), Json.createObjectBuilder() .add("id", managedDs.getId()) .add("persistentId", managedDs.getGlobalIdString()) ); - - } catch ( WrappedResponse ex ) { + + } catch (WrappedResponse ex) { return ex.getResponse(); } } - - + @POST @Path("{identifier}/datasets/:import") - public Response importDataset( String jsonBody, @PathParam("identifier") String parentIdtf, @QueryParam("pid") String pidParam, @QueryParam("release") String releaseParam ) { + public Response importDataset(String jsonBody, @PathParam("identifier") String parentIdtf, @QueryParam("pid") String pidParam, @QueryParam("release") String releaseParam) { try { User u = findUserOrDie(); Dataverse owner = findDataverseOrDie(parentIdtf); Dataset ds = parseDataset(jsonBody); ds.setOwner(owner); - - if ( ds.getVersions().isEmpty() ) { + + if (ds.getVersions().isEmpty()) { return badRequest("Supplied json must contain a single dataset version."); } - + DatasetVersion version = ds.getVersions().get(0); - if ( version.getVersionState() == null ) { + if (version.getVersionState() == null) { version.setVersionState(DatasetVersion.VersionState.DRAFT); } - - if ( 
nonEmpty(pidParam) ) { + + if (nonEmpty(pidParam)) { Optional maybePid = GlobalId.parse(pidParam); - if ( maybePid.isPresent() ) { + if (maybePid.isPresent()) { ds.setGlobalId(maybePid.get()); } else { // unparsable PID passed. Terminate. return badRequest("Cannot parse the PID parameter '" + pidParam + "'. Make sure it is in valid form - see Dataverse Native API documentation."); } } - - if ( ds.getIdentifier() == null ) { + + if (ds.getIdentifier() == null) { return badRequest("Please provide a persistent identifier, either by including it in the JSON, or by using the pid query parameter."); } boolean shouldRelease = StringUtil.isTrue(releaseParam); DataverseRequest request = createDataverseRequest(u); - - if ( shouldRelease ) { + + if (shouldRelease) { DatasetVersion latestVersion = ds.getLatestVersion(); latestVersion.setVersionState(DatasetVersion.VersionState.RELEASED); latestVersion.setVersionNumber(1l); latestVersion.setMinorVersionNumber(0l); - if ( latestVersion.getCreateTime() != null ) { + if (latestVersion.getCreateTime() != null) { latestVersion.setCreateTime(new Date()); } - if ( latestVersion.getLastUpdateTime() != null ) { - latestVersion.setLastUpdateTime(new Date() ); + if (latestVersion.getLastUpdateTime() != null) { + latestVersion.setLastUpdateTime(new Date()); } } - + Dataset managedDs = execCommand(new ImportDatasetCommand(ds, request)); JsonObjectBuilder responseBld = Json.createObjectBuilder() .add("id", managedDs.getId()) .add("persistentId", managedDs.getGlobalIdString()); - - if ( shouldRelease ) { + + if (shouldRelease) { PublishDatasetResult res = execCommand(new PublishDatasetCommand(managedDs, request, false, shouldRelease)); responseBld.add("releaseCompleted", res.isCompleted()); } - + return created("/datasets/" + managedDs.getId(), responseBld); - - } catch ( WrappedResponse ex ) { + + } catch (WrappedResponse ex) { return ex.getResponse(); } } - - private Dataset parseDataset(String datasetJson ) throws WrappedResponse { - try ( StringReader rdr = new StringReader(datasetJson) ) { + + private Dataset parseDataset(String datasetJson) throws WrappedResponse { + try (StringReader rdr = new StringReader(datasetJson)) { return jsonParser().parseDataset(Json.createReader(rdr).readObject()); - } catch ( JsonParsingException | JsonParseException jpe ) { + } catch (JsonParsingException | JsonParseException jpe) { logger.log(Level.SEVERE, "Error parsing dataset json. 
Json: {0}", datasetJson); - throw new WrappedResponse( error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage()) ); + throw new WrappedResponse(error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage())); } } - - @GET - @Path("{identifier}") - public Response viewDataverse( @PathParam("identifier") String idtf ) { - return allowCors(response( req -> ok(json(execCommand( - new GetDataverseCommand(req, findDataverseOrDie(idtf))))))); - } - - @DELETE - @Path("{identifier}") - public Response deleteDataverse( @PathParam("identifier") String idtf ) { - return response( req -> { - execCommand( new DeleteDataverseCommand(req, findDataverseOrDie(idtf))); - return ok( "Dataverse " + idtf +" deleted"); + + @GET + @Path("{identifier}") + public Response viewDataverse(@PathParam("identifier") String idtf) { + return allowCors(response(req -> ok(json(execCommand( + new GetDataverseCommand(req, findDataverseOrDie(idtf))))))); + } + + @DELETE + @Path("{identifier}") + public Response deleteDataverse(@PathParam("identifier") String idtf) { + return response(req -> { + execCommand(new DeleteDataverseCommand(req, findDataverseOrDie(idtf))); + return ok("Dataverse " + idtf + " deleted"); }); - } - - @DELETE - @Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}") - public Response deleteDataverseLinkingDataverse( @PathParam("linkingDataverseId") String linkingDataverseId, @PathParam("linkedDataverseId") String linkedDataverseId) { - boolean index = true; - return response(req -> { - execCommand(new DeleteDataverseLinkingDataverseCommand(req, findDataverseOrDie(linkingDataverseId), findDataverseLinkingDataverseOrDie(linkingDataverseId, linkedDataverseId), index)); - return ok("Link from Dataverse " + linkingDataverseId + " to linked Dataverse " + linkedDataverseId + " deleted"); + } + + @DELETE + @Path("{linkingDataverseId}/deleteLink/{linkedDataverseId}") + public Response deleteDataverseLinkingDataverse(@PathParam("linkingDataverseId") String linkingDataverseId, @PathParam("linkedDataverseId") String linkedDataverseId) { + boolean index = true; + return response(req -> { + execCommand(new DeleteDataverseLinkingDataverseCommand(req, findDataverseOrDie(linkingDataverseId), findDataverseLinkingDataverseOrDie(linkingDataverseId, linkedDataverseId), index)); + return ok("Link from Dataverse " + linkingDataverseId + " to linked Dataverse " + linkedDataverseId + " deleted"); }); - } - - @GET - @Path("{identifier}/metadatablocks") - public Response listMetadataBlocks( @PathParam("identifier") String dvIdtf ) { + } + + @GET + @Path("{identifier}/metadatablocks") + public Response listMetadataBlocks(@PathParam("identifier") String dvIdtf) { try { JsonArrayBuilder arr = Json.createArrayBuilder(); - final List blocks = execCommand( new ListMetadataBlocksCommand(createDataverseRequest(findUserOrDie()), findDataverseOrDie(dvIdtf))); - for ( MetadataBlock mdb : blocks) { - arr.add( brief.json(mdb) ); + final List blocks = execCommand(new ListMetadataBlocksCommand(createDataverseRequest(findUserOrDie()), findDataverseOrDie(dvIdtf))); + for (MetadataBlock mdb : blocks) { + arr.add(brief.json(mdb)); } return allowCors(ok(arr)); - } catch (WrappedResponse we ){ + } catch (WrappedResponse we) { return we.getResponse(); } - } - + } + @POST @Path("{identifier}/metadatablocks") @Produces(MediaType.APPLICATION_JSON) - public Response setMetadataBlocks( @PathParam("identifier")String dvIdtf, String blockIds ) { - + public Response setMetadataBlocks(@PathParam("identifier") String dvIdtf, String blockIds) 
{ + List blocks = new LinkedList<>(); try { - for ( JsonValue blockId : Util.asJsonArray(blockIds).getValuesAs(JsonValue.class) ) { - MetadataBlock blk = (blockId.getValueType()==ValueType.NUMBER) - ? findMetadataBlock( ((JsonNumber)blockId).longValue() ) - : findMetadataBlock( ((JsonString)blockId).getString() ); - if ( blk == null ) { - return error(Response.Status.BAD_REQUEST, "Can't find metadata block '"+ blockId + "'"); + for (JsonValue blockId : Util.asJsonArray(blockIds).getValuesAs(JsonValue.class)) { + MetadataBlock blk = (blockId.getValueType() == ValueType.NUMBER) + ? findMetadataBlock(((JsonNumber) blockId).longValue()) + : findMetadataBlock(((JsonString) blockId).getString()); + if (blk == null) { + return error(Response.Status.BAD_REQUEST, "Can't find metadata block '" + blockId + "'"); } - blocks.add( blk ); + blocks.add(blk); } - } catch( Exception e ) { + } catch (Exception e) { return error(Response.Status.BAD_REQUEST, e.getMessage()); } - + try { - execCommand( new UpdateDataverseMetadataBlocksCommand.SetBlocks(createDataverseRequest(findUserOrDie()), findDataverseOrDie(dvIdtf), blocks)); + execCommand(new UpdateDataverseMetadataBlocksCommand.SetBlocks(createDataverseRequest(findUserOrDie()), findDataverseOrDie(dvIdtf), blocks)); return ok("Metadata blocks of dataverse " + dvIdtf + " updated."); - + } catch (WrappedResponse ex) { return ex.getResponse(); } } - + @GET @Path("{identifier}/metadatablocks/:isRoot") - public Response getMetadataRoot_legacy( @PathParam("identifier")String dvIdtf ) { + public Response getMetadataRoot_legacy(@PathParam("identifier") String dvIdtf) { return getMetadataRoot(dvIdtf); } - + @GET @Path("{identifier}/metadatablocks/isRoot") @Produces(MediaType.APPLICATION_JSON) - public Response getMetadataRoot( @PathParam("identifier")String dvIdtf ) { - return response( req -> { + public Response getMetadataRoot(@PathParam("identifier") String dvIdtf) { + return response(req -> { final Dataverse dataverse = findDataverseOrDie(dvIdtf); - if ( permissionSvc.request(req) - .on(dataverse) - .has(Permission.EditDataverse) ) { - return ok( dataverse.isMetadataBlockRoot() ); + if (permissionSvc.request(req) + .on(dataverse) + .has(Permission.EditDataverse)) { + return ok(dataverse.isMetadataBlockRoot()); } else { - return error( Status.FORBIDDEN, "Not authorized" ); + return error(Status.FORBIDDEN, "Not authorized"); } }); } - + @POST @Path("{identifier}/metadatablocks/:isRoot") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.WILDCARD) - public Response setMetadataRoot_legacy( @PathParam("identifier")String dvIdtf, String body ) { + public Response setMetadataRoot_legacy(@PathParam("identifier") String dvIdtf, String body) { return setMetadataRoot(dvIdtf, body); } - + @PUT @Path("{identifier}/metadatablocks/isRoot") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.WILDCARD) - public Response setMetadataRoot( @PathParam("identifier")String dvIdtf, String body ) { - return response( req -> { + public Response setMetadataRoot(@PathParam("identifier") String dvIdtf, String body) { + return response(req -> { final boolean root = parseBooleanOrDie(body); final Dataverse dataverse = findDataverseOrDie(dvIdtf); execCommand(new UpdateDataverseMetadataBlocksCommand.SetRoot(req, dataverse, root)); - return ok("Dataverse " + dataverse.getName() + " is now a metadata " + (root? "" : "non-") + "root"); + return ok("Dataverse " + dataverse.getName() + " is now a metadata " + (root ? 
"" : "non-") + "root"); }); } - + @GET @Path("{identifier}/facets/") /** * return list of facets for the dataverse with alias `dvIdtf` */ - public Response listFacets( @PathParam("identifier") String dvIdtf ) { - try - { - User u = findUserOrDie(); - DataverseRequest r = createDataverseRequest( u ); - Dataverse dataverse = findDataverseOrDie(dvIdtf); - JsonArrayBuilder fs = Json.createArrayBuilder(); - for( DataverseFacet f : execCommand( new ListFacetsCommand( r, dataverse ) ) ) - { - fs.add( f.getDatasetFieldType().getName() ); - } - return allowCors( ok( fs ) ); - } - catch( WrappedResponse e ) - { - return e.getResponse(); - } + public Response listFacets(@PathParam("identifier") String dvIdtf) { + try { + User u = findUserOrDie(); + DataverseRequest r = createDataverseRequest(u); + Dataverse dataverse = findDataverseOrDie(dvIdtf); + JsonArrayBuilder fs = Json.createArrayBuilder(); + for (DataverseFacet f : execCommand(new ListFacetsCommand(r, dataverse))) { + fs.add(f.getDatasetFieldType().getName()); + } + return allowCors(ok(fs)); + } catch (WrappedResponse e) { + return e.getResponse(); + } } @POST @Path("{identifier}/facets") @Produces(MediaType.APPLICATION_JSON) /** - * (not publicly documented) API endpoint for assigning facets to a dataverse. - * `curl -X POST -H "X-Dataverse-key: $ADMIN_KEY" http://localhost:8088/api/dataverses/$dv/facets --upload-file foo.json`; where foo.json contains a list of datasetField names, - * works as expected (judging by the UI). - * This triggers a 500 when '-d @foo.json' is used. + * (not publicly documented) API endpoint for assigning facets to a + * dataverse. `curl -X POST -H "X-Dataverse-key: $ADMIN_KEY" + * http://localhost:8088/api/dataverses/$dv/facets --upload-file foo.json`; + * where foo.json contains a list of datasetField names, works as expected + * (judging by the UI). This triggers a 500 when '-d @foo.json' is used. */ - public Response setFacets( @PathParam("identifier")String dvIdtf, String facetIds ) { - + public Response setFacets(@PathParam("identifier") String dvIdtf, String facetIds) { + List facets = new LinkedList<>(); - for ( JsonString facetId : Util.asJsonArray(facetIds).getValuesAs(JsonString.class) ) { + for (JsonString facetId : Util.asJsonArray(facetIds).getValuesAs(JsonString.class)) { DatasetFieldType dsfType = findDatasetFieldType(facetId.getString()); - if ( dsfType == null ) { - return error(Response.Status.BAD_REQUEST, "Can't find dataset field type '"+ facetId + "'"); + if (dsfType == null) { + return error(Response.Status.BAD_REQUEST, "Can't find dataset field type '" + facetId + "'"); } else if (!dsfType.isFacetable()) { - return error(Response.Status.BAD_REQUEST, "Dataset field type '"+ facetId + "' is not facetable"); + return error(Response.Status.BAD_REQUEST, "Dataset field type '" + facetId + "' is not facetable"); } - facets.add( dsfType ); + facets.add(dsfType); } - + try { Dataverse dataverse = findDataverseOrDie(dvIdtf); // by passing null for Featured Dataverses and DataverseFieldTypeInputLevel, those are not changed - execCommand( new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(findUserOrDie()), null) ); + execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(findUserOrDie()), null)); return ok("Facets of dataverse " + dvIdtf + " updated."); - + } catch (WrappedResponse ex) { return ex.getResponse(); } - } + } // FIXME: This listContent method is way too optimistic, always returning "ok" and never "error". 
- // FIXME: This listContent method should be reformatted. The indentation and whitespace is odd. - // FIXME: This method is too slow with lots of data: https://github.com/IQSS/dataverse/issues/2122 // TODO: Investigate why there was a change in the timeframe of when pull request #4350 was merged // (2438-4295-dois-for-files branch) such that a contributor API token no longer allows this method // to be called without a PermissionException being thrown. - @GET - @Path("{identifier}/contents") - public Response listContent( @PathParam("identifier") String dvIdtf ) { - DvObject.Visitor ser = new DvObject.Visitor() { - - @Override - public JsonObjectBuilder visit(Dataverse dv) { - return Json.createObjectBuilder().add("type", "dataverse") - .add("id", dv.getId()) - .add("title",dv.getName() ); - } - - @Override - public JsonObjectBuilder visit(Dataset ds) { - return json(ds).add("type", "dataset"); - } - - @Override - public JsonObjectBuilder visit(DataFile df) { throw new UnsupportedOperationException("Files don't live directly in Dataverses"); } - }; + @GET + @Path("{identifier}/contents") + public Response listContent(@PathParam("identifier") String dvIdtf) { + DvObject.Visitor ser = new DvObject.Visitor() { + + @Override + public JsonObjectBuilder visit(Dataverse dv) { + return Json.createObjectBuilder() + .add("type", "dataverse") + .add("id", dv.getId()) + .add("title", dv.getName()); + } + + @Override + public JsonObjectBuilder visit(Dataset ds) { + return json(ds).add("type", "dataset"); + } + + @Override + public JsonObjectBuilder visit(DataFile df) { + throw new UnsupportedOperationException("Files don't live directly in Dataverses"); + } + }; - return allowCors(response( req -> ok( - execCommand(new ListDataverseContentCommand(req, findDataverseOrDie(dvIdtf))) - .stream() - .map( dvo->(JsonObjectBuilder)dvo.accept(ser)) - .collect(toJsonArray())) + SolrQueryResponse solrQueryResponse; + try { + solrQueryResponse = cxtx.search().search(req, findDataverseOrDie(dvIdtf), "*", Arrays.asList("parentIdentifier:" + dvIdtf), "sort=nameSort", "order=desc", 0, true, 0); + } catch (SearchException ex) { + logger.severe("could not connect to Solr"); + return null; + } + + List solrSearchResults = solrQueryResponse.getSolrSearchResults(); + for (SolrSearchResult result : solrSearchResults) { + if(result.getType()=="dataverse"){ + } + } + return allowCors(response(req -> ok( + execCommand(new ListDataverseContentCommand(req, findDataverseOrDie(dvIdtf))) + .stream().map(dvo -> (JsonObjectBuilder) dvo.accept(ser)) + .collect(toJsonArray())) )); - } - + } + @GET - @Path("{identifier}/roles") - public Response listRoles( @PathParam("identifier") String dvIdtf ) { - return response( req -> ok( - execCommand( new ListRolesCommand(req, findDataverseOrDie(dvIdtf)) ) - .stream().map(r->json(r)) - .collect( toJsonArray() ) + @Path("{identifier}/roles") + public Response listRoles(@PathParam("identifier") String dvIdtf) { + return response(req -> ok( + execCommand(new ListRolesCommand(req, findDataverseOrDie(dvIdtf))) + .stream().map(r -> json(r)) + .collect(toJsonArray()) )); - } - - @POST - @Path("{identifier}/roles") - public Response createRole( RoleDTO roleDto, @PathParam("identifier") String dvIdtf ) { - return response( req -> ok( json(execCommand(new CreateRoleCommand(roleDto.asRole(), req, findDataverseOrDie(dvIdtf)))))); - } - - @GET - @Path("{identifier}/assignments") - public Response listAssignments( @PathParam("identifier") String dvIdtf) { - return response( req -> ok( + } + + @POST + 
@Path("{identifier}/roles") + public Response createRole(RoleDTO roleDto, @PathParam("identifier") String dvIdtf) { + return response(req -> ok(json(execCommand(new CreateRoleCommand(roleDto.asRole(), req, findDataverseOrDie(dvIdtf)))))); + } + + @GET + @Path("{identifier}/assignments") + public Response listAssignments(@PathParam("identifier") String dvIdtf) { + return response(req -> ok( execCommand(new ListRoleAssignments(req, findDataverseOrDie(dvIdtf))) - .stream() - .map( a -> json(a) ) - .collect(toJsonArray()) + .stream() + .map(a -> json(a)) + .collect(toJsonArray()) )); - } + } /** * This code for setting a dataverse logo via API was started when initially @@ -629,175 +642,173 @@ public Response listAssignments( @PathParam("identifier") String dvIdtf) { // return error(Status.BAD_REQUEST, "problem uploading logo: " + ex); // } // } + @POST + @Path("{identifier}/assignments") + public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey) { - @POST - @Path("{identifier}/assignments") - public Response createAssignment( RoleAssignmentDTO ra, @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { - - try { + try { final DataverseRequest req = createDataverseRequest(findUserOrDie()); final Dataverse dataverse = findDataverseOrDie(dvIdtf); RoleAssignee assignee = findAssignee(ra.getAssignee()); - if ( assignee==null ) { - return error( Status.BAD_REQUEST, "Assignee not found" ); + if (assignee == null) { + return error(Status.BAD_REQUEST, "Assignee not found"); } DataverseRole theRole; Dataverse dv = dataverse; theRole = null; - while ( (theRole==null) && (dv!=null) ) { - for ( DataverseRole aRole : rolesSvc.availableRoles(dv.getId()) ) { - if ( aRole.getAlias().equals(ra.getRole()) ) { + while ((theRole == null) && (dv != null)) { + for (DataverseRole aRole : rolesSvc.availableRoles(dv.getId())) { + if (aRole.getAlias().equals(ra.getRole())) { theRole = aRole; break; } } dv = dv.getOwner(); } - if ( theRole == null ) { - return error( Status.BAD_REQUEST, "Can't find role named '" + ra.getRole() + "' in dataverse " + dataverse); - } - String privateUrlToken = null; - - return ok(json(execCommand(new AssignRoleCommand(assignee, theRole, dataverse, req, privateUrlToken)))); - - } catch (WrappedResponse ex) { - logger.log(Level.WARNING, "Can''t create assignment: {0}", ex.getMessage()); - return ex.getResponse(); - } - } - - @DELETE - @Path("{identifier}/assignments/{id}") - public Response deleteAssignment( @PathParam("id") long assignmentId, @PathParam("identifier") String dvIdtf ) { - RoleAssignment ra = em.find( RoleAssignment.class, assignmentId ); - if ( ra != null ) { + if (theRole == null) { + return error(Status.BAD_REQUEST, "Can't find role named '" + ra.getRole() + "' in dataverse " + dataverse); + } + String privateUrlToken = null; + + return ok(json(execCommand(new AssignRoleCommand(assignee, theRole, dataverse, req, privateUrlToken)))); + + } catch (WrappedResponse ex) { + logger.log(Level.WARNING, "Can''t create assignment: {0}", ex.getMessage()); + return ex.getResponse(); + } + } + + @DELETE + @Path("{identifier}/assignments/{id}") + public Response deleteAssignment(@PathParam("id") long assignmentId, @PathParam("identifier") String dvIdtf) { + RoleAssignment ra = em.find(RoleAssignment.class, assignmentId); + if (ra != null) { try { findDataverseOrDie(dvIdtf); - execCommand( new RevokeRoleCommand(ra, createDataverseRequest(findUserOrDie()))); - return ok("Role " + 
ra.getRole().getName() - + " revoked for assignee " + ra.getAssigneeIdentifier() - + " in " + ra.getDefinitionPoint().accept(DvObject.NamePrinter) ); + execCommand(new RevokeRoleCommand(ra, createDataverseRequest(findUserOrDie()))); + return ok("Role " + ra.getRole().getName() + + " revoked for assignee " + ra.getAssigneeIdentifier() + + " in " + ra.getDefinitionPoint().accept(DvObject.NamePrinter)); } catch (WrappedResponse ex) { return ex.getResponse(); } - } else { - return error( Status.NOT_FOUND, "Role assignment " + assignmentId + " not found" ); - } - } - + } else { + return error(Status.NOT_FOUND, "Role assignment " + assignmentId + " not found"); + } + } + @POST - @Path("{identifier}/actions/:publish") - public Response publishDataverse( @PathParam("identifier") String dvIdtf ) { + @Path("{identifier}/actions/:publish") + public Response publishDataverse(@PathParam("identifier") String dvIdtf) { try { Dataverse dv = findDataverseOrDie(dvIdtf); - return ok( json(execCommand( new PublishDataverseCommand(createDataverseRequest(findAuthenticatedUserOrDie()), dv))) ); - + return ok(json(execCommand(new PublishDataverseCommand(createDataverseRequest(findAuthenticatedUserOrDie()), dv)))); + } catch (WrappedResponse wr) { return wr.getResponse(); } } - + @POST - @Path("{identifier}/groups/") - public Response createExplicitGroup( ExplicitGroupDTO dto, @PathParam("identifier") String dvIdtf) { - return response( req ->{ + @Path("{identifier}/groups/") + public Response createExplicitGroup(ExplicitGroupDTO dto, @PathParam("identifier") String dvIdtf) { + return response(req -> { ExplicitGroupProvider prv = explicitGroupSvc.getProvider(); ExplicitGroup newGroup = dto.apply(prv.makeGroup()); - - newGroup = execCommand( new CreateExplicitGroupCommand(req, findDataverseOrDie(dvIdtf), newGroup)); - + + newGroup = execCommand(new CreateExplicitGroupCommand(req, findDataverseOrDie(dvIdtf), newGroup)); + String groupUri = String.format("%s/groups/%s", dvIdtf, newGroup.getGroupAliasInOwner()); - return created( groupUri, json(newGroup) ); + return created(groupUri, json(newGroup)); }); } - + @GET - @Path("{identifier}/groups/") - public Response listGroups( @PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey ) { - return response( req -> ok( - execCommand(new ListExplicitGroupsCommand(req, findDataverseOrDie(dvIdtf))) - .stream().map( eg->json(eg)) - .collect( toJsonArray() ) + @Path("{identifier}/groups/") + public Response listGroups(@PathParam("identifier") String dvIdtf, @QueryParam("key") String apiKey) { + return response(req -> ok( + execCommand(new ListExplicitGroupsCommand(req, findDataverseOrDie(dvIdtf))) + .stream().map(eg -> json(eg)) + .collect(toJsonArray()) )); } - + @GET - @Path("{identifier}/groups/{aliasInOwner}") - public Response getGroupByOwnerAndAliasInOwner( @PathParam("identifier") String dvIdtf, - @PathParam("aliasInOwner") String grpAliasInOwner ){ - return response( req -> ok(json(findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), - req, - grpAliasInOwner)))); - } - + @Path("{identifier}/groups/{aliasInOwner}") + public Response getGroupByOwnerAndAliasInOwner(@PathParam("identifier") String dvIdtf, + @PathParam("aliasInOwner") String grpAliasInOwner) { + return response(req -> ok(json(findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), + req, + grpAliasInOwner)))); + } + @PUT - @Path("{identifier}/groups/{aliasInOwner}") - public Response updateGroup(ExplicitGroupDTO groupDto, - @PathParam("identifier") String dvIdtf, - @PathParam("aliasInOwner") String 
grpAliasInOwner ) - { - return response( req-> ok(json(execCommand( - new UpdateExplicitGroupCommand(req, - groupDto.apply( findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner))))))); - } - + @Path("{identifier}/groups/{aliasInOwner}") + public Response updateGroup(ExplicitGroupDTO groupDto, + @PathParam("identifier") String dvIdtf, + @PathParam("aliasInOwner") String grpAliasInOwner) { + return response(req -> ok(json(execCommand( + new UpdateExplicitGroupCommand(req, + groupDto.apply(findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner))))))); + } + @DELETE - @Path("{identifier}/groups/{aliasInOwner}") + @Path("{identifier}/groups/{aliasInOwner}") public Response deleteGroup(@PathParam("identifier") String dvIdtf, - @PathParam("aliasInOwner") String grpAliasInOwner ) - { - return response( req -> { - execCommand( new DeleteExplicitGroupCommand(req, - findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner)) ); - return ok( "Group " + dvIdtf + "/" + grpAliasInOwner + " deleted" ); + @PathParam("aliasInOwner") String grpAliasInOwner) { + return response(req -> { + execCommand(new DeleteExplicitGroupCommand(req, + findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner))); + return ok("Group " + dvIdtf + "/" + grpAliasInOwner + " deleted"); }); } - + @POST - @Path("{identifier}/groups/{aliasInOwner}/roleAssignees") + @Path("{identifier}/groups/{aliasInOwner}/roleAssignees") @Consumes("application/json") - public Response addRoleAssingees(List roleAssingeeIdentifiers, - @PathParam("identifier") String dvIdtf, - @PathParam("aliasInOwner") String grpAliasInOwner) - { - return response( req -> ok( - json( - execCommand( - new AddRoleAssigneesToExplicitGroupCommand(req, - findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner), - new TreeSet<>(roleAssingeeIdentifiers)))))); - } - + public Response addRoleAssingees(List roleAssingeeIdentifiers, + @PathParam("identifier") String dvIdtf, + @PathParam("aliasInOwner") String grpAliasInOwner) { + return response(req -> ok( + json( + execCommand( + new AddRoleAssigneesToExplicitGroupCommand(req, + findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner), + new TreeSet<>(roleAssingeeIdentifiers)))))); + } + @PUT - @Path("{identifier}/groups/{aliasInOwner}/roleAssignees/{roleAssigneeIdentifier: .*}") - public Response addRoleAssingee( @PathParam("identifier") String dvIdtf, - @PathParam("aliasInOwner") String grpAliasInOwner, - @PathParam("roleAssigneeIdentifier") String roleAssigneeIdentifier) { + @Path("{identifier}/groups/{aliasInOwner}/roleAssignees/{roleAssigneeIdentifier: .*}") + public Response addRoleAssingee(@PathParam("identifier") String dvIdtf, + @PathParam("aliasInOwner") String grpAliasInOwner, + @PathParam("roleAssigneeIdentifier") String roleAssigneeIdentifier) { return addRoleAssingees(Collections.singletonList(roleAssigneeIdentifier), dvIdtf, grpAliasInOwner); } - + @DELETE - @Path("{identifier}/groups/{aliasInOwner}/roleAssignees/{roleAssigneeIdentifier: .*}") - public Response deleteRoleAssingee( @PathParam("identifier") String dvIdtf, - @PathParam("aliasInOwner") String grpAliasInOwner, - @PathParam("roleAssigneeIdentifier") String roleAssigneeIdentifier ) { - return response( req ->ok(json(execCommand( - new RemoveRoleAssigneesFromExplicitGroupCommand(req, - findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner), - Collections.singleton(roleAssigneeIdentifier)))))); - } - - private ExplicitGroup 
findExplicitGroupOrDie( DvObject dv, DataverseRequest req, String groupIdtf ) throws WrappedResponse { - ExplicitGroup eg = execCommand(new GetExplicitGroupCommand(req, dv, groupIdtf) ); - if ( eg == null ) throw new WrappedResponse( notFound("Can't find " + groupIdtf + " in dataverse " + dv.getId())); + @Path("{identifier}/groups/{aliasInOwner}/roleAssignees/{roleAssigneeIdentifier: .*}") + public Response deleteRoleAssingee(@PathParam("identifier") String dvIdtf, + @PathParam("aliasInOwner") String grpAliasInOwner, + @PathParam("roleAssigneeIdentifier") String roleAssigneeIdentifier) { + return response(req -> ok(json(execCommand( + new RemoveRoleAssigneesFromExplicitGroupCommand(req, + findExplicitGroupOrDie(findDataverseOrDie(dvIdtf), req, grpAliasInOwner), + Collections.singleton(roleAssigneeIdentifier)))))); + } + + private ExplicitGroup findExplicitGroupOrDie(DvObject dv, DataverseRequest req, String groupIdtf) throws WrappedResponse { + ExplicitGroup eg = execCommand(new GetExplicitGroupCommand(req, dv, groupIdtf)); + if (eg == null) { + throw new WrappedResponse(notFound("Can't find " + groupIdtf + " in dataverse " + dv.getId())); + } return eg; } @GET @Path("{identifier}/links") - public Response listLinks(@PathParam("identifier") String dvIdtf ) { + public Response listLinks(@PathParam("identifier") String dvIdtf) { try { User u = findUserOrDie(); Dataverse dv = findDataverseOrDie(dvIdtf); @@ -833,42 +844,42 @@ public Response listLinks(@PathParam("identifier") String dvIdtf ) { return wr.getResponse(); } } - + @POST - @Path("{id}/move/{targetDataverseAlias}") - public Response moveDataverse(@PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) { - try{ - User u = findUserOrDie(); + @Path("{id}/move/{targetDataverseAlias}") + public Response moveDataverse(@PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) { + try { + User u = findUserOrDie(); Dataverse dv = findDataverseOrDie(id); Dataverse target = findDataverseOrDie(targetDataverseAlias); - if (target == null){ + if (target == null) { return error(Response.Status.BAD_REQUEST, "Target Dataverse not found."); - } + } execCommand(new MoveDataverseCommand( createDataverseRequest(u), dv, target, force - )); + )); return ok("Dataverse moved successfully"); } catch (WrappedResponse ex) { return ex.getResponse(); } } - + @PUT - @Path("{linkedDataverseAlias}/link/{linkingDataverseAlias}") - public Response linkDataverse(@PathParam("linkedDataverseAlias") String linkedDataverseAlias, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) { - try{ - User u = findUserOrDie(); + @Path("{linkedDataverseAlias}/link/{linkingDataverseAlias}") + public Response linkDataverse(@PathParam("linkedDataverseAlias") String linkedDataverseAlias, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) { + try { + User u = findUserOrDie(); Dataverse linked = findDataverseOrDie(linkedDataverseAlias); Dataverse linking = findDataverseOrDie(linkingDataverseAlias); - if (linked == null){ + if (linked == null) { return error(Response.Status.BAD_REQUEST, "Linked Dataverse not found."); - } - if (linking == null){ + } + if (linking == null) { return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found."); - } + } execCommand(new LinkDataverseCommand( createDataverseRequest(u), linking, linked - )); + )); return ok("Dataverse " + linked.getAlias() + " linked successfully to 
" + linking.getAlias()); } catch (WrappedResponse ex) { return ex.getResponse(); diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java index 89cb58b04c7..f5a9f8bb58b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/ipaddress/IpGroupsServiceBean.java @@ -123,7 +123,8 @@ public Set findAllIncludingIp( IpAddress ipa ) { .setParameter("d", ip6arr[3]) .getResultList(); cached = new HashSet<>(groupList); - return groupCache.put(ipa, cached); + groupCache.put(ipa, cached); + return cached; } else { throw new IllegalArgumentException( "Unknown IpAddress type: " + ipa.getClass() + " (for IpAddress:" + ipa + ")" ); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java deleted file mode 100644 index 0f719155135..00000000000 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListDataverseContentCommand.java +++ /dev/null @@ -1,76 +0,0 @@ -package edu.harvard.iq.dataverse.engine.command.impl; - -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.Dataverse; -import edu.harvard.iq.dataverse.DvObject; -import edu.harvard.iq.dataverse.authorization.Permission; -import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; -import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; -import edu.harvard.iq.dataverse.engine.command.CommandContext; -import edu.harvard.iq.dataverse.engine.command.DataverseRequest; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.logging.Logger; - -/** - * Lists the content of a dataverse - both datasets and dataverses. 
- * - * @author michael - */ -// no annotations here, since permissions are dynamically decided -public class ListDataverseContentCommand extends AbstractCommand> { - - private static final Logger logger = Logger.getLogger(ListDataverseContentCommand.class.getName()); - - private final Dataverse dvToList; - - public ListDataverseContentCommand(DataverseRequest aRequest, Dataverse anAffectedDataverse) { - super(aRequest, anAffectedDataverse); - dvToList = anAffectedDataverse; - } - - @Override - public List execute(CommandContext ctxt) throws CommandException { - LinkedList result = new LinkedList<>(); - User user = getRequest().getUser(); - if (user.isSuperuser()) { - result.addAll(ctxt.datasets().findByOwnerId(dvToList.getId())); - result.addAll(ctxt.dataverses().findByOwnerId(dvToList.getId())); - } else { - List datasets = ctxt.datasets().findByOwnerId(dvToList.getId()); - int i = 0; - long t0 = System.currentTimeMillis(); - for (Dataset ds : datasets) { - i++; - logger.info("On "+i+" out of " + datasets.size()); - if (ds.isReleased() || ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { - result.add(ds); - } - } - logger.info(""+(System.currentTimeMillis()-t0)); - List dataverses = ctxt.dataverses().findByOwnerId(dvToList.getId()); - for (Dataverse dv : dataverses) { - i++; - logger.info("On "+i+" out of " + (datasets.size()+dataverses.size())); - if (dv.isReleased() || ctxt.permissions().requestOn(getRequest(), dv).has(Permission.ViewUnpublishedDataverse)) { - result.add(dv); - } - } - logger.info(""+(System.currentTimeMillis()-t0)); - } - return result; - } - - @Override - public Map> getRequiredPermissions() { - return Collections.singletonMap("", - dvToList.isReleased() ? Collections.emptySet() - : Collections.singleton(Permission.ViewUnpublishedDataverse)); - } - -} diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java index 7cd0df846c2..646a59f6641 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/DataRetrieverAPI.java @@ -141,23 +141,7 @@ public String retrieveTestPager(@QueryParam("selectedPage") int selectedPage){ public boolean isSuperuser(){ - - // Is this an authenticated user? - // - if ((session.getUser() == null)||(!session.getUser().isAuthenticated())){ - return false; - } - - // Is this a user? - // - authUser = (AuthenticatedUser)session.getUser(); - if (authUser==null){ - return false; - } - - // Is this a superuser? 
- // - return authUser.isSuperuser(); + return (session.getUser() != null) && session.getUser().isSuperuser(); } private AuthenticatedUser getUserFromIdentifier(String userIdentifier){ @@ -547,46 +531,4 @@ private JsonArrayBuilder formatSolrDocs(SolrQueryResponse solrResponse, RoleTagR return jsonSolrDocsArrayBuilder; } - - - /*private JsonArrayBuilder formatSolrDocs(SolrQueryResponse solrResponse, MyDataFilterParams filterParams, MyDataFinder finder ){ - - if (solrResponse == null){ - logger.severe("DataRetrieverAPI.getDvObjectTypeCounts: formatSolrDocs should not be null"); - return null; - } - JsonArrayBuilder jsonSolrDocsArrayBuilder = Json.createArrayBuilder(); - - for (SolrSearchResult doc : solrQueryResponse.getSolrSearchResults()){ - - if( authUser!= null){ - doc.setUserRole(myDataQueryHelperServiceBean.getRolesOnDVO(authUser, doc.getEntityId(), filterParams.getRoleIds(), finder)); - } - jsonSolrDocsArrayBuilder.add(doc.getJsonForMyData()); - } - return jsonSolrDocsArrayBuilder; - - } - */ - /* - @Path("test-it") - @Produces({"application/json"}) - @GET - public String retrieveMyData(@QueryParam("key") String keyValue){ //String myDataParams) { - - final JsonObjectBuilder jsonData = Json.createObjectBuilder(); - jsonData.add("name", keyValue); - return jsonData.build().toString(); - } - */ - - private void msg(String s){ - //System.out.println(s); - } - - private void msgt(String s){ - msg("-------------------------------"); - msg(s); - msg("-------------------------------"); - } } \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java index 628249427c1..ad395598f45 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java @@ -86,7 +86,6 @@ public class MyDataFinder { public MyDataFinder(DataverseRolePermissionHelper rolePermissionHelper, RoleAssigneeServiceBean roleAssigneeService, DvObjectServiceBean dvObjectServiceBean, GroupServiceBean groupService) { - this.msgt("MyDataFinder, constructor"); this.rolePermissionHelper = rolePermissionHelper; this.roleAssigneeService = roleAssigneeService; this.dvObjectServiceBean = dvObjectServiceBean; @@ -158,26 +157,6 @@ public void initFields(){ public DataverseRolePermissionHelper getRolePermissionHelper(){ return this.rolePermissionHelper; } - /* - private ArrayList dataverseIds; - private ArrayList primaryDatasetIds; - private ArrayList primaryFileIds; - private ArrayList parentIds; - */ - - /*public void runFindDataSteps(String userIdentifier){ - this.userIdentifier = userIdentifier; - msgt("runFindDataSteps: " + userIdentifier); - if (!runStep1RoleAssignments()){ - return; - } - if (!runStep2DirectAssignments()){ - return; - } - if (!fileGrandparentFileIds.isEmpty()){ - runStep3FilePermsAssignedAtDataverse(); - } - }*/ public void runFindDataSteps(MyDataFilterParams filterParams){ @@ -220,7 +199,7 @@ public List getSolrFilterQueries(){ */ private List getSolrFilterQueries(boolean totalCountsOnly){ if (this.hasError()){ - throw new IllegalStateException("Error encountered earlier. Before calling this method on a MyDataFinder object, first check 'hasError()'"); + throw new IllegalStateException("Error encountered earlier.
Before calling this method on a MyDataFinder object, first check 'hasError()'"); } // init filterQueries list @@ -298,14 +277,11 @@ public String getSolrDvObjectFilterQuery(){ Set distinctParentIds = new HashSet<>(parentIds); - if ((distinctEntityIds.size() == 0) && (distinctParentIds.size() == 0)) { + if ((distinctEntityIds.isEmpty()) && (distinctParentIds.isEmpty())) { this.addErrorMessage(DataRetrieverAPI.MSG_NO_RESULTS_FOUND); return null; } - - msg("distinctEntityIds (1): " + distinctEntityIds.size()); - msg("distinctParentIds: " + distinctParentIds.size()); - + // See if we can trim down the list of distinctEntityIds // If we have the parent of a distinctEntityId in distinctParentIds, // then we query it via the parent @@ -331,9 +307,7 @@ public String getSolrDvObjectFilterQuery(){ } // Set the distinctEntityIds to the finalDirectEntityIds //distinctEntityIds = new HashSet<>(distinctEntityIds); - distinctEntityIds = new HashSet<>(finalDirectEntityIds); - - msg("distinctEntityIds (2): " + distinctEntityIds.size()); + distinctEntityIds = new HashSet<>(finalDirectEntityIds); // Start up a SolrQueryFormatter for building clauses // @@ -508,7 +482,6 @@ private boolean runStep2DirectAssignments(){ //msgt("runStep2DirectAssignments"); List results = this.dvObjectServiceBean.getDvObjectInfoForMyData(directDvObjectIds); - msgt("runStep2DirectAssignments number of results: " + results.size()); //List results = this.roleAssigneeService.getAssignmentsFor(this.userIdentifier); if (results.isEmpty()){ this.addErrorMessage("Sorry, you have no assigned Dataverses, Datasets, or Files."); @@ -582,13 +555,11 @@ private boolean runStep2DirectAssignments(){ private boolean runStep3FilePermsAssignedAtDataverse(){ - msgt("runStep3FilePermsAssignedAtDataverse"); if ((this.fileGrandparentFileIds == null)||(this.fileGrandparentFileIds.isEmpty())){ return true; } List results = this.dvObjectServiceBean.getDvObjectInfoByParentIdForMyData(this.fileGrandparentFileIds); - msg("runStep3FilePermsAssignedAtDataverse results count: " + results.size()); /* SEK 07/09 Ticket 2329 Removed failure for empty results - if there are none let it go */ @@ -619,15 +590,6 @@ private boolean runStep3FilePermsAssignedAtDataverse(){ return true; } - /* - private void postStep2Cleanup(){ - // Clear step1 lookups - idsWithDataversePermissions = null; - idsWithDatasetPermissions = null; - idsWithFilePermissions = null; - directDvObjectIds = null; // Direct ids no longer needed - }*/ - public boolean hasError(){ return this.errorFound; @@ -639,15 +601,4 @@ private void addErrorMessage(String s){ this.errorFound = true; this.errorMessage = s; } - - private void msg(String s){ - //logger.fine(s); - } - - private void msgt(String s){ - msg("-------------------------------"); - msg(s); - msg("-------------------------------"); - } - } // end: MyDataFinder From a818aeada10555b47f1cfc8997b21aef0f55ae72 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Thu, 26 Jul 2018 10:22:49 -0400 Subject: [PATCH 034/701] #4610 remove script lookup from Create DS command --- .../engine/command/impl/AbstractCreateDatasetCommand.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java index 9c521625b48..9ebc816a9cf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractCreateDatasetCommand.java @@ -133,6 +133,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { ctxt.index().indexDataset(theDataset, true); ctxt.solrIndex().indexPermissionsOnSelfAndChildren(theDataset.getId()); + /* if (DataCaptureModuleUtil.rsyncSupportEnabled(ctxt.settings().getValueForKey(SettingsServiceBean.Key.UploadMethods))) { logger.fine("Requesting rsync support."); try { @@ -142,8 +143,7 @@ public Dataset execute(CommandContext ctxt) throws CommandException { logger.log(Level.WARNING, "Problem getting rsync script: {0}", ex.getLocalizedMessage()); } logger.fine("Done with rsync request."); - } - + }*/ return theDataset; } From 08fcd1001121589a57daa2c1a4ef9709d1c32ba4 Mon Sep 17 00:00:00 2001 From: oscardssmith Date: Thu, 26 Jul 2018 15:03:26 -0400 Subject: [PATCH 035/701] still broken --- .../harvard/iq/dataverse/api/Dataverses.java | 75 ++++++++++--------- 1 file changed, 38 insertions(+), 37 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a05f4f9d00c..e7fe4bd9c14 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseFacet; import edu.harvard.iq.dataverse.DataverseContact; +import edu.harvard.iq.dataverse.DataverseServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.GlobalId; @@ -52,12 +53,14 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateExplicitGroupCommand; import edu.harvard.iq.dataverse.search.SearchException; +import edu.harvard.iq.dataverse.search.SearchServiceBean; import edu.harvard.iq.dataverse.search.SolrQueryResponse; import edu.harvard.iq.dataverse.search.SolrSearchResult; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.StringUtil; import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; import edu.harvard.iq.dataverse.util.json.JsonParseException; +import edu.harvard.iq.dataverse.util.json.JsonPrinter; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; import java.io.StringReader; import java.util.Collections; @@ -111,6 +114,12 @@ public class Dataverses extends AbstractApiBean { @EJB ExplicitGroupServiceBean explicitGroupSvc; + + @EJB + SearchServiceBean search; + + @EJB + DataverseServiceBean dataverseServiceBean; // @EJB // SystemConfig systemConfig; @@ -484,48 +493,40 @@ public Response setFacets(@PathParam("identifier") String dvIdtf, String facetId // to be called without a PermissionException being thrown. 
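// For context: the command-based listing removed in PATCH 033 above checked
// permissions one object at a time (see the deleted ListDataverseContentCommand),
// roughly like this condensed sketch:
//
//     for (Dataset ds : datasets) {
//         if (ds.isReleased() || ctxt.permissions().requestOn(getRequest(), ds)
//                 .has(Permission.ViewUnpublishedDataset)) {
//             result.add(ds);
//         }
//     }
//
// which is what made listing large dataverses slow (issues/2122). The Solr-backed
// listContent below replaces those per-object checks with a single filtered query.
// One caveat about the interim code that follows: SolrSearchResult.getType()
// returns a String, and String contents must be compared with equals(), e.g.
// "dataverse".equals(res.getType()); the == checks below test object identity,
// not content.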
@GET @Path("{identifier}/contents") - public Response listContent(@PathParam("identifier") String dvIdtf) { - DvObject.Visitor ser = new DvObject.Visitor() { - - @Override - public JsonObjectBuilder visit(Dataverse dv) { - return Json.createObjectBuilder() - .add("type", "dataverse") - .add("id", dv.getId()) - .add("title", dv.getName()); - } - - @Override - public JsonObjectBuilder visit(Dataset ds) { - return json(ds).add("type", "dataset"); - } - - @Override - public JsonObjectBuilder visit(DataFile df) { - throw new UnsupportedOperationException("Files don't live directly in Dataverses"); - } - }; - - SolrQueryResponse solrQueryResponse; + public Response listContent(@PathParam("identifier") String dvIdtf) throws WrappedResponse { + List searchResults; + DataverseRequest req = null; try { - solrQueryResponse = cxtx.search().search(req, findDataverseOrDie(dvIdtf), "*", Arrays.asList("parentIdentifier:" + dvIdtf), "sort=nameSort", "order=desc", 0, true, 0); + searchResults = search.search(req, findDataverseOrDie(dvIdtf), "*", Arrays.asList("parentIdentifier:" + dvIdtf), "sort=nameSort", "order=desc", 0, true, 0).getSolrSearchResults(); } catch (SearchException ex) { - logger.severe("could not connect to Solr"); - return null; + logger.severe("Could not get results from solr"); + return allowCors(error(Status.INTERNAL_SERVER_ERROR, "Could not get results from solr")); } - - List solrSearchResults = solrQueryResponse.getSolrSearchResults(); - for (SolrSearchResult result : solrSearchResults) { - if(result.getType()=="dataverse"){ - } + String rootDataverseName = dataverseServiceBean.findRootDataverse().getName(); + return response(q -> ok(searchResults.stream().map(result -> (JsonObjectBuilder) solrResultToJson(result, rootDataverseName)).collect(toJsonArray()))); + } + + public JsonObjectBuilder solrResultToJson(SolrSearchResult res, String rootDataverseName) { + new Dataset().getPublicationDateFormattedYYYYMMDD(); + if (res.getType()=="dataverse"){ + return Json.createObjectBuilder() + .add("type", "dataverse") + .add("id", res.getId()) + .add("title", res.getName()); + }else if (res.getType()=="dataset"){ + return Json.createObjectBuilder() + .add("id", res.getId()) + .add("identifier", res.getIdentifier()) + .add("persistentUrl", res.getPersistentUrl()) + .add("protocol", res.getProtocol()) + .add("authority", res.getAuthority()) + .add("publisher", rootDataverseName) + .add("publicationDate", res.getPublicationDateFormattedYYYYMMDD()) + .add("storageIdentifier", res.getStorageIdentifier()); } - return allowCors(response(req -> ok( - execCommand(new ListDataverseContentCommand(req, findDataverseOrDie(dvIdtf))) - .stream().map(dvo -> (JsonObjectBuilder) dvo.accept(ser)) - .collect(toJsonArray())) - )); + return null; } - + @GET @Path("{identifier}/roles") public Response listRoles(@PathParam("identifier") String dvIdtf) { From 730b2192d2f9a965a8f4e04ccaf978fcaa334544 Mon Sep 17 00:00:00 2001 From: oscardssmith Date: Fri, 27 Jul 2018 16:11:19 -0400 Subject: [PATCH 036/701] now it runs out of memory --- .../edu/harvard/iq/dataverse/Dataverse.java | 2 +- .../iq/dataverse/DataverseServiceBean.java | 11 +++ .../harvard/iq/dataverse/api/Dataverses.java | 69 +++++++++---------- .../impl/shib/ShibGroupServiceBean.java | 2 +- 4 files changed, 47 insertions(+), 37 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index ac2049eb0a2..53fe82a6c8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java 
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -41,6 +41,7 @@ */ @NamedQueries({ @NamedQuery(name = "Dataverse.ownedObjectsById", query = "SELECT COUNT(obj) FROM DvObject obj WHERE obj.owner.id=:id"), + @NamedQuery(name = "Dataverse.findDirectChildren", query = "SELECT dvo FROM DvObject dvo WHERE dvo.owner.identifier=:identifier"), @NamedQuery(name = "Dataverse.findRoot", query = "SELECT d FROM Dataverse d where d.owner.id=null"), @NamedQuery(name = "Dataverse.findByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias)=:alias"), @NamedQuery(name = "Dataverse.filterByAlias", query="SELECT dv FROM Dataverse dv WHERE LOWER(dv.alias) LIKE :alias order by dv.alias"), @@ -745,5 +746,4 @@ public boolean isAncestorOf( DvObject other ) { } return false; } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index d698c71b7f9..df531256c81 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -179,6 +179,17 @@ public Dataverse findByAlias(String anAlias) { return null; } } + + public List findDirectChildren(String identifier) { + try { + return em.createNamedQuery("Dataverse.findDirectChildren", DvObject.class) + .setParameter("identifier", identifier.toLowerCase()) + .getResultList(); + } catch ( NoResultException|NonUniqueResultException ex ) { + logger.fine("Unable to find the children of dataverse using identifier \"" + identifier + "\": " + ex); + return null; + } + } public boolean hasData( Dataverse dv ) { TypedQuery amountQry = em.createNamedQuery("Dataverse.ownedObjectsById", Long.class) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index e7fe4bd9c14..d8c07abe22f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -53,14 +53,12 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseMetadataBlocksCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateExplicitGroupCommand; import edu.harvard.iq.dataverse.search.SearchException; +import edu.harvard.iq.dataverse.search.SearchFields; import edu.harvard.iq.dataverse.search.SearchServiceBean; -import edu.harvard.iq.dataverse.search.SolrQueryResponse; -import edu.harvard.iq.dataverse.search.SolrSearchResult; -import edu.harvard.iq.dataverse.settings.SettingsServiceBean; +import edu.harvard.iq.dataverse.search.SortBy; import edu.harvard.iq.dataverse.util.StringUtil; import static edu.harvard.iq.dataverse.util.StringUtil.nonEmpty; import edu.harvard.iq.dataverse.util.json.JsonParseException; -import edu.harvard.iq.dataverse.util.json.JsonPrinter; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; import java.io.StringReader; import java.util.Collections; @@ -99,7 +97,9 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import java.util.Arrays; import java.util.Date; +import java.util.Map; import java.util.Optional; +import java.util.stream.Collectors; /** * A REST API for dataverses. 
@@ -494,37 +494,36 @@ public Response setFacets(@PathParam("identifier") String dvIdtf, String facetId @GET @Path("{identifier}/contents") public Response listContent(@PathParam("identifier") String dvIdtf) throws WrappedResponse { - List searchResults; - DataverseRequest req = null; - try { - searchResults = search.search(req, findDataverseOrDie(dvIdtf), "*", Arrays.asList("parentIdentifier:" + dvIdtf), "sort=nameSort", "order=desc", 0, true, 0).getSolrSearchResults(); - } catch (SearchException ex) { - logger.severe("Could not get results from solr"); - return allowCors(error(Status.INTERNAL_SERVER_ERROR, "Could not get results from solr")); - } - String rootDataverseName = dataverseServiceBean.findRootDataverse().getName(); - return response(q -> ok(searchResults.stream().map(result -> (JsonObjectBuilder) solrResultToJson(result, rootDataverseName)).collect(toJsonArray()))); - } - - public JsonObjectBuilder solrResultToJson(SolrSearchResult res, String rootDataverseName) { - new Dataset().getPublicationDateFormattedYYYYMMDD(); - if (res.getType()=="dataverse"){ - return Json.createObjectBuilder() - .add("type", "dataverse") - .add("id", res.getId()) - .add("title", res.getName()); - }else if (res.getType()=="dataset"){ - return Json.createObjectBuilder() - .add("id", res.getId()) - .add("identifier", res.getIdentifier()) - .add("persistentUrl", res.getPersistentUrl()) - .add("protocol", res.getProtocol()) - .add("authority", res.getAuthority()) - .add("publisher", rootDataverseName) - .add("publicationDate", res.getPublicationDateFormattedYYYYMMDD()) - .add("storageIdentifier", res.getStorageIdentifier()); - } - return null; + + DvObject.Visitor ser = new DvObject.Visitor() { + + @Override + public JsonObjectBuilder visit(Dataverse dv) { + return Json.createObjectBuilder().add("type", "dataverse") + .add("id", dv.getId()) + .add("title",dv.getName() ); + } + + @Override + public JsonObjectBuilder visit(Dataset ds) { + return json(ds).add("type", "dataset"); + } + + @Override + public JsonObjectBuilder visit(DataFile df) { throw new UnsupportedOperationException("Files don't live directly in Dataverses"); } + }; + + List children = dataverseServiceBean.findDirectChildren(dvIdtf); + Map childMap= children.stream().collect(Collectors.toMap (dvo -> dvo.getIdentifier(), dvo -> dvo)); + + return allowCors(response((DataverseRequest req) -> { + try { + return ok(search.search(req, findDataverseOrDie(dvIdtf), "*", Arrays.asList(SearchFields.PARENT_ID + dvIdtf), SearchFields.NAME_SORT, SortBy.DESCENDING, 0, false, Integer.MAX_VALUE) + .getSolrSearchResults().stream().map(result -> (JsonObjectBuilder) childMap.get(result.getId()).accept(ser)).collect(toJsonArray())); + } catch (SearchException ex) { + return error(Status.INTERNAL_SERVER_ERROR, "Could not get results from solr"); + } + })); } @GET diff --git a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java index eb7778b84c6..de35d339a56 100644 --- a/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/authorization/groups/impl/shib/ShibGroupServiceBean.java @@ -79,10 +79,10 @@ public ShibGroup save(String name, String shibIdpAttribute, String shibIdp) { public Set findFor(AuthenticatedUser authenticatedUser) { Set groupsForUser = groupCache.get(authenticatedUser); if (groupsForUser == null) { + groupsForUser = new 
HashSet<>(); String shibIdp = authenticatedUser.getShibIdentityProvider(); logger.fine("IdP for user " + authenticatedUser.getIdentifier() + " is " + shibIdp); if (shibIdp != null) { - groupsForUser = new HashSet<>(); /** * @todo Rather than a straight string equality match, we have a * requirement to support regular expressions: From 4cc643c7a8e60d726628559f4e629fd09c899e6d Mon Sep 17 00:00:00 2001 From: Michael Heppler Date: Mon, 30 Jul 2018 17:08:41 -0400 Subject: [PATCH 037/701] Fixed rsync panel collapse toggle. Various code clean up. [ref #4610] --- src/main/java/Bundle.properties | 2 +- src/main/webapp/editFilesFragment.xhtml | 160 +++++++++++------------- 2 files changed, 74 insertions(+), 88 deletions(-) diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index 6514ac72a47..18dbd0e858f 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1316,7 +1316,7 @@ file.selectToAdd.dragdropMsg=Drag and drop files here. file.createUploadDisabled=rsync allows you to run a script that transfers your files into or out of Dataverse via SSH. It is useful for extremely large files, or packages containing a large number of files. Once you have saved this dataset, you can upload your data using rsync via the "Upload Files" button on the dataset page. file.fromDropbox=Upload from Dropbox file.fromDropbox.tip=Files can also be uploaded directly from Dropbox. -file.fromRsync=Upload with rsync+ssh via Data Capture Module (DCM) +file.fromRsync=Upload with rsync+SSH via Data Capture Module (DCM) file.fromHTTP=Upload with HTTP via your browser file.api.httpDisabled=File upload via HTTP is not available for this installation of Dataverse. file.api.alreadyHasPackageFile=File upload via HTTP disabled since this dataset already contains a package file. diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 060e00aa1c1..c83aba6ed95 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -27,23 +27,20 @@ -
+
-
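A closing note on the one-line IpGroupsServiceBean change in PATCH 033 above: java.util.Map#put returns the value previously mapped to the key (null on first insert), so the old `return groupCache.put(ipa, cached);` handed back null whenever an address was cached for the first time. A minimal, self-contained illustration of that contract (the demo class name is ours, not from the patch):

    import java.util.HashMap;
    import java.util.Map;

    public class MapPutReturnDemo {
        public static void main(String[] args) {
            Map<String, String> cache = new HashMap<>();
            // put() returns the previous mapping for the key, or null
            System.out.println(cache.put("ip", "groupsA")); // null
            System.out.println(cache.put("ip", "groupsB")); // groupsA
            System.out.println(cache.get("ip"));            // groupsB
        }
    }

Returning the freshly built set directly, as the patch does, avoids returning null on the cache-miss path.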