diff --git a/doc/Sphinx/source/img/image1institutional.png b/doc/Sphinx/source/img/image1institutional.png
deleted file mode 100755
index 39852ea1c23..00000000000
Binary files a/doc/Sphinx/source/img/image1institutional.png and /dev/null differ
diff --git a/doc/Sphinx/source/img/image2institutional.png b/doc/Sphinx/source/img/image2institutional.png
deleted file mode 100755
index a14fded0cc3..00000000000
Binary files a/doc/Sphinx/source/img/image2institutional.png and /dev/null differ
diff --git a/doc/Sphinx/source/img/image3institutional.png b/doc/Sphinx/source/img/image3institutional.png
deleted file mode 100755
index bc213a656f9..00000000000
Binary files a/doc/Sphinx/source/img/image3institutional.png and /dev/null differ
diff --git a/doc/Sphinx/source/img/image4institutional.png b/doc/Sphinx/source/img/image4institutional.png
deleted file mode 100755
index 40f1c03d1d7..00000000000
Binary files a/doc/Sphinx/source/img/image4institutional.png and /dev/null differ
diff --git a/doc/shib/shib.md b/doc/shib/shib.md
index 5d2b3a55858..f29f9cc485e 100644
--- a/doc/shib/shib.md
+++ b/doc/shib/shib.md
@@ -6,8 +6,6 @@ FIXME: merge with what's in the Installation Guide: http://guides.dataverse.org/
## Set up a valid SSL cert
-See also notes on setting up the SSL cert for https://apitest.dataverse.org at https://github.com/IQSS/dataverse/tree/master/scripts/deploy/apitest.dataverse.org
-
### Create a private key
[root@dvn-vm3 ~]# openssl genrsa -out /root/cert/shibtest.dataverse.org.key 2048
diff --git a/doc/sphinx-guides/source/admin/index.rst b/doc/sphinx-guides/source/admin/index.rst
index e7dfb0bf46a..28a46cf58e5 100755
--- a/doc/sphinx-guides/source/admin/index.rst
+++ b/doc/sphinx-guides/source/admin/index.rst
@@ -13,9 +13,8 @@ These "superuser" tasks are managed via the new page called the Dashboard. A use
Contents:
.. toctree::
- :maxdepth: 2
- harvestclients
- harvestserver
- metadataexport
- timers
+ harvestclients
+ harvestserver
+ metadataexport
+ timers
diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst
index 7bb2805b99a..66ea551d446 100755
--- a/doc/sphinx-guides/source/api/dataaccess.rst
+++ b/doc/sphinx-guides/source/api/dataaccess.rst
@@ -60,11 +60,11 @@ Multiple File ("bundle") download
Returns the files listed, zipped.
Parameters:
-~~~~~~~~~~
+~~~~~~~~~~~
none.
"All Formats" bundled access for Tabular Files.
-----------------------------------------------
+-----------------------------------------------
``/api/access/datafile/bundle/$id``
@@ -78,7 +78,7 @@ It returns a zipped bundle that contains the data in the following formats:
* File citation, in Endnote and RIS formats.
Parameters:
-~~~~~~~~~~
+~~~~~~~~~~~
none.
Data Variable Metadata Access
diff --git a/doc/sphinx-guides/source/api/index.rst b/doc/sphinx-guides/source/api/index.rst
index 152ea180236..b9d30d20e91 100755
--- a/doc/sphinx-guides/source/api/index.rst
+++ b/doc/sphinx-guides/source/api/index.rst
@@ -11,16 +11,15 @@ interoperate with the Dataverse to utilize our
APIs. In 4.0, we require to get a token, by simply registering for a Dataverse account, before using our APIs
(We are considering making some of the APIs completely public in the future - no token required - if you use it only a few times).
-Rather than using a production installation of Dataverse, API users should use http://apitest.dataverse.org for testing.
+Rather than using a production installation of Dataverse, API users are welcome to use http://demo.dataverse.org for testing.
Contents:
.. toctree::
- :maxdepth: 2
- sword
- search
- dataaccess
- native-api
- client-libraries
- apps
+ sword
+ search
+ dataaccess
+ native-api
+ client-libraries
+ apps
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 59af4f00eee..8b686df66cd 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -89,7 +89,7 @@ Publish the Dataverse pointed by ``identifier``, which can either by the dataver
Datasets
~~~~~~~~
-**Note** Creation of new datasets is done by ``POST``ing them onto dataverses. See dataverse section.
+**Note** Creation of new datasets is done with a ``POST`` onto dataverses. See dataverse section.
**Note** In all commands below, dataset versions can be referred to as:
@@ -125,12 +125,12 @@ List versions of the dataset::
Show a version of the dataset. The Dataset also include any metadata blocks the data might have::
GET http://$SERVER/api/datasets/$id/versions/$versionNumber?key=$apiKey
-
-
+
+
Export the metadata of the current published version of a dataset in various formats see Note below::
GET http://$SERVER/api/datasets/export?exporter=ddi&persistentId=$persistentId
-
+
Note: Supported exporters (export formats) are ddi, oai_ddi, dcterms, oai_dc, and dataverse_json.
@@ -163,9 +163,9 @@ To revert to the default logic, use ``:publicationDate`` as the ``$datasetFieldT
Note that the dataset field used has to be a date field::
PUT http://$SERVER/api/datasets/$id/citationdate?key=$apiKey
-
+
Restores the default logic of the field type to be used as the citation date. Same as ``PUT`` with ``:publicationDate`` body::
-
+
DELETE http://$SERVER/api/datasets/$id/citationdate?key=$apiKey
List all the role assignments at the given dataset::
@@ -185,7 +185,7 @@ Delete a Private URL from a dataset (if it exists)::
DELETE http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey
Builtin Users
-~~~~~
+~~~~~~~~~~~~~
This endopint deals with users of the built-in authentication provider. Note that users may come from other authentication services as well, such as Shibboleth.
For this service to work, the setting ``BuiltinUsers.KEY`` has to be set, and its value passed as ``key`` to
@@ -368,18 +368,29 @@ Toggles superuser mode on the ``AuthenticatedUser`` whose ``identifier`` (withou
POST http://$SERVER/api/admin/superuser/$identifier
+List all role assignments of a role assignee (i.e. a user or a group)::
+
+ GET http://$SERVER/api/admin/assignments/assignees/$identifier
+
+Note that ``identifier`` can contain slashes (e.g. ``&ip/localhost-users``).
+
IpGroups
^^^^^^^^
-List all the ip groups::
+Lists all the ip groups::
GET http://$SERVER/api/admin/groups/ip
-Adds a new ip group. POST data should specify the group in JSON format. Examples are available at ``data/ipGroup1.json``. ::
+Adds a new ip group. POST data should specify the group in JSON format. Examples are available in the ``data`` folder. Using this method, an IP Group is always created, but its ``alias`` might be different from the one appearing in the
+JSON file, to ensure it is unique. ::
POST http://$SERVER/api/admin/groups/ip
-Returns a the group in a JSON format. ``groupIdtf`` can either be the group id in the database (in case it is numeric), or the group alias. ::
+Creates or updates the ip group ``$groupAlias``. ::
+
+ POST http://$SERVER/api/admin/groups/ip/$groupAlias
+
+Returns the group in JSON format. ``$groupIdtf`` can either be the group id in the database (in case it is numeric), or the group alias. ::
GET http://$SERVER/api/admin/groups/ip/$groupIdtf
diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst
index 84f202a5b83..2f7090a327b 100755
--- a/doc/sphinx-guides/source/api/search.rst
+++ b/doc/sphinx-guides/source/api/search.rst
@@ -20,11 +20,11 @@ Parameters
============== ======= ===========
Name Type Description
============== ======= ===========
-q string The search term or terms. Using "title:data" will search only the "title" field. "*" can be used as a wildcard either alone or adjacent to a term (i.e. "bird*"). For example, https://apitest.dataverse.org/api/search?q=title:data
-type string Can be either "dataverse", "dataset", or "file". Multiple "type" parameters can be used to include multiple types (i.e. ``type=dataset&type=file``). If omitted, all types will be returned. For example, https://apitest.dataverse.org/api/search?q=*&type=dataset
-subtree string The identifier of the dataverse to which the search should be narrowed. The subtree of this dataverse and all its children will be searched. For example, https://apitest.dataverse.org/api/search?q=data&subtree=birds
+q string The search term or terms. Using "title:data" will search only the "title" field. "*" can be used as a wildcard either alone or adjacent to a term (i.e. "bird*"). For example, https://demo.dataverse.org/api/search?q=title:data
+type string Can be either "dataverse", "dataset", or "file". Multiple "type" parameters can be used to include multiple types (i.e. ``type=dataset&type=file``). If omitted, all types will be returned. For example, https://demo.dataverse.org/api/search?q=*&type=dataset
+subtree string The identifier of the dataverse to which the search should be narrowed. The subtree of this dataverse and all its children will be searched. For example, https://demo.dataverse.org/api/search?q=data&subtree=birds
sort string The sort field. Supported values include "name" and "date". See example under "order".
-order string The order in which to sort. Can either be "asc" or "desc". For example, https://apitest.dataverse.org/api/search?q=data&sort=name&order=asc
+order string The order in which to sort. Can either be "asc" or "desc". For example, https://demo.dataverse.org/api/search?q=data&sort=name&order=asc
per_page int The number of results to return per request. The default is 10. The max is 1000. See :ref:`iteration example `.
start int A cursor for paging through search results. See :ref:`iteration example `.
show_relevance boolean Whether or not to show details of which fields were matched by the query. False by default. See :ref:`advanced search example `.
@@ -35,7 +35,7 @@ fq string A filter query on the search term. Multiple "fq" parame
Basic Search Example
--------------------
-https://apitest.dataverse.org/api/search?q=trees
+https://demo.dataverse.org/api/search?q=trees
.. code-block:: json
@@ -52,8 +52,8 @@ https://apitest.dataverse.org/api/search?q=trees
{
"name":"Trees",
"type":"dataverse",
- "url":"https://apitest.dataverse.org/dataverse/trees",
- "image_url":"https://apitest.dataverse.org/api/access/dvCardImage/7",
+ "url":"https://demo.dataverse.org/dataverse/trees",
+ "image_url":"https://demo.dataverse.org/api/access/dvCardImage/7",
"identifier":"trees",
"description":"A tree dataverse with some birds",
"published_at":"2016-05-10T12:53:38Z"
@@ -61,8 +61,8 @@ https://apitest.dataverse.org/api/search?q=trees
{
"name":"Chestnut Trees",
"type":"dataverse",
- "url":"https://apitest.dataverse.org/dataverse/chestnuttrees",
- "image_url":"https://apitest.dataverse.org/api/access/dvCardImage/9",
+ "url":"https://demo.dataverse.org/dataverse/chestnuttrees",
+ "image_url":"https://demo.dataverse.org/api/access/dvCardImage/9",
"identifier":"chestnuttrees",
"description":"A dataverse with chestnut trees and an oriole",
"published_at":"2016-05-10T12:52:38Z"
@@ -70,8 +70,8 @@ https://apitest.dataverse.org/api/search?q=trees
{
"name":"trees.png",
"type":"file",
- "url":"https://apitest.dataverse.org/api/access/datafile/12",
- "image_url":"https://apitest.dataverse.org/api/access/fileCardImage/12",
+ "url":"https://demo.dataverse.org/api/access/datafile/12",
+ "image_url":"https://demo.dataverse.org/api/access/fileCardImage/12",
"file_id":"12",
"description":"",
"published_at":"2016-05-10T12:53:39Z",
@@ -84,8 +84,8 @@ https://apitest.dataverse.org/api/search?q=trees
{
"name":"Birds",
"type":"dataverse",
- "url":"https://apitest.dataverse.org/dataverse/birds",
- "image_url":"https://apitest.dataverse.org/api/access/dvCardImage/2",
+ "url":"https://demo.dataverse.org/dataverse/birds",
+ "image_url":"https://demo.dataverse.org/api/access/dvCardImage/2",
"identifier":"birds",
"description":"A bird dataverse with some trees",
"published_at":"2016-05-10T12:57:27Z"
@@ -100,7 +100,7 @@ https://apitest.dataverse.org/api/search?q=trees
Advanced Search Example
-----------------------
-https://apitest.dataverse.org/api/search?q=finch&show_relevance=true&show_facets=true&fq=publicationDate:2016&subtree=birds
+https://demo.dataverse.org/api/search?q=finch&show_relevance=true&show_facets=true&fq=publicationDate:2016&subtree=birds
In this example, ``show_relevance=true`` matches per field are shown. Available facets are shown with ``show_facets=true`` and of the facets is being used with ``fq=publication_date_s:2015``. The search is being narrowed to the dataverse with the identifier "birds" with the parameter ``subtree=birds``.
@@ -118,8 +118,8 @@ In this example, ``show_relevance=true`` matches per field are shown. Available
{
"name":"Finches",
"type":"dataverse",
- "url":"https://apitest.dataverse.org/dataverse/finches",
- "image_url":"https://apitest.dataverse.org/api/access/dvCardImage/3",
+ "url":"https://demo.dataverse.org/dataverse/finches",
+ "image_url":"https://demo.dataverse.org/api/access/dvCardImage/3",
"identifier":"finches",
"description":"A dataverse with finches",
"published_at":"2016-05-10T12:57:38Z",
@@ -145,7 +145,7 @@ In this example, ``show_relevance=true`` matches per field are shown. Available
"name":"Darwin's Finches",
"type":"dataset",
"url":"http://dx.doi.org/10.5072/FK2/G2VPE7",
- "image_url":"https://apitest.dataverse.org/api/access/dsCardImage/2",
+ "image_url":"https://demo.dataverse.org/api/access/dsCardImage/2",
"global_id":"doi:10.5072/FK2/G2VPE7",
"description": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.",
"published_at":"2016-05-10T12:57:45Z",
@@ -224,7 +224,7 @@ Be default, up to 10 results are returned with every request (though this can be
#!/usr/bin/env python
import urllib2
import json
- base = 'https://apitest.dataverse.org'
+ base = 'https://demo.dataverse.org'
rows = 10
start = 0
page = 1
diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py
index e9c09e0c61b..ca7b40d4dc6 100755
--- a/doc/sphinx-guides/source/conf.py
+++ b/doc/sphinx-guides/source/conf.py
@@ -14,6 +14,7 @@
import sys
import os
+from datetime import datetime
sys.path.insert(0, os.path.abspath('../../'))
import sphinx_bootstrap_theme
@@ -56,16 +57,16 @@
# General information about the project.
project = u'Dataverse'
-copyright = u'2016, The President & Fellows of Harvard College'
+copyright = u'%d, The President & Fellows of Harvard College' % datetime.now().year
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
-version = '4.5'
+version = '4.5.1'
# The full version, including alpha/beta/rc tags.
-release = '4.5'
+release = '4.5.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -358,7 +359,7 @@
epub_title = u'Dataverse'
epub_author = u'Dataverse Team'
epub_publisher = u'Dataverse Team'
-epub_copyright = u'2014, Dataverse Team'
+epub_copyright = u'%d, The President & Fellows of Harvard College' % datetime.now().year
# The basename for the epub file. It defaults to the project name.
#epub_basename = u'Consilience Documentation'
diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst
index d3871380f52..532f2f75d7a 100755
--- a/doc/sphinx-guides/source/developers/dev-environment.rst
+++ b/doc/sphinx-guides/source/developers/dev-environment.rst
@@ -7,7 +7,7 @@ Development Environment
Assumptions
-----------
-This guide assumes you are using a Mac but we do have pages for :doc:`/developers/windows` and :doc:`/developers/ubuntu`.
+This guide assumes you are using a Mac. If you are using Windows or Linux, please reach out to other developers at https://groups.google.com/forum/#!forum/dataverse-dev
Requirements
------------
diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst
index 9225d00dcd6..a9daa12b16c 100755
--- a/doc/sphinx-guides/source/developers/index.rst
+++ b/doc/sphinx-guides/source/developers/index.rst
@@ -9,17 +9,14 @@ Developer Guide
Contents:
.. toctree::
- :maxdepth: 2
-
- intro
- dev-environment
- branching-strategy
- testing
- documentation
- debugging
- coding-style
- making-releases
- tools
- unf/index
-
+ intro
+ dev-environment
+ branching-strategy
+ testing
+ documentation
+ debugging
+ coding-style
+ making-releases
+ tools
+ unf/index
diff --git a/doc/sphinx-guides/source/developers/ubuntu.rst b/doc/sphinx-guides/source/developers/ubuntu.rst
deleted file mode 100755
index 9204a6171eb..00000000000
--- a/doc/sphinx-guides/source/developers/ubuntu.rst
+++ /dev/null
@@ -1,51 +0,0 @@
-======
-Ubuntu
-======
-
-Requirements
-------------
-
-Tested on Ubuntu 14.04.
-
-Java 8
-~~~~~~
-
-- ``sudo apt-get install openjdk-8-jdk openjdk-8-jre``
-
-
-Maven
-~~~~~
-
-- ``sudo apt-get install maven``
-
-
-Glassfish
-~~~~~~~~~
-
-- ``wget http://download.java.net/glassfish/4.1/release/glassfish-4.1.zip``
-
-- ``unzip glassfish-4.1*zip``
-
-
-PostgreSQL
-~~~~~~~~~~
-
-- ``sudo apt-get install postgresql postgresql-contrib``
-
-
-jq
-~~
-
-- ``sudo apt-get install jq``
-
-
-Curl
-~~~~
-
-- ``sudo apt-get install curl``
-
-
-Recommendations and Dev Environment
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Please visit :doc:`/developers/dev-environment/`
diff --git a/doc/sphinx-guides/source/developers/unf/index.rst b/doc/sphinx-guides/source/developers/unf/index.rst
index 3980909fc0b..dc2f37d0ba9 100644
--- a/doc/sphinx-guides/source/developers/unf/index.rst
+++ b/doc/sphinx-guides/source/developers/unf/index.rst
@@ -1,17 +1,17 @@
.. _unf:
-====================================
+=====================================
Universal Numerical Fingerprint (UNF)
-====================================
+=====================================
Contents:
.. toctree::
:maxdepth: 2
- unf-v3
- unf-v5
- unf-v6
+ unf-v3
+ unf-v5
+ unf-v6
.. figure:: ./img/unf-diagram.png
:align: center
diff --git a/doc/sphinx-guides/source/developers/unf/unf-v5.rst b/doc/sphinx-guides/source/developers/unf/unf-v5.rst
index 8606ff06ebc..4fb160c20ea 100644
--- a/doc/sphinx-guides/source/developers/unf/unf-v5.rst
+++ b/doc/sphinx-guides/source/developers/unf/unf-v5.rst
@@ -10,6 +10,3 @@ UNF Version 5
**To address this, the Project is about to release UNF Version 6. The release date is still being discussed. It may coincide with the release of Dataverse 4.0. Alternatively, the production version of DVN 3.6.3 may get upgraded to use UNF v6 prior to that. This will be announced shortly. In the process, we are solving another problem with UNF v5 - this time we've made an effort to offer very implementer-friendly documentation that describes the algorithm fully and unambiguously. So if you are interested in implementing your own version of a UNF calculator, (something we would like to encourage!) please proceed directly to the Version 6 documentation.**
**Going forward, we are going to offer a preserved version of the Version 5 library and, possibly, an online UNF v5 calculator, for the purposes of validating vectors and data sets for which published Version 5 UNFs exist.**
-
------
-
diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst
deleted file mode 100755
index 9ed6f10dcd5..00000000000
--- a/doc/sphinx-guides/source/developers/windows.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-=======
-Windows
-=======
-
-Developers using Windows who have trouble setting up their :doc:`/developers/dev-environment/` should reach out to over Dataverse developers per https://github.com/IQSS/dataverse/blob/master/CONTRIBUTING.md
diff --git a/doc/sphinx-guides/source/index.rst b/doc/sphinx-guides/source/index.rst
index 1c3a88dcdef..8fd7056c032 100755
--- a/doc/sphinx-guides/source/index.rst
+++ b/doc/sphinx-guides/source/index.rst
@@ -3,10 +3,10 @@
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
-Dataverse 4.5 Guides
+Dataverse 4.5.1 Guides
======================
-These guides are for the most recent version of Dataverse. For the guides for **version 4.3.1** please go `here `_.
+These guides are for the most recent version of Dataverse. For the guides for **version 4.5** please go `here `_.
.. toctree::
:glob:
diff --git a/doc/sphinx-guides/source/installation/administration.rst b/doc/sphinx-guides/source/installation/administration.rst
index 6c665cbb650..59cf11652a3 100644
--- a/doc/sphinx-guides/source/installation/administration.rst
+++ b/doc/sphinx-guides/source/installation/administration.rst
@@ -72,6 +72,17 @@ User Administration
There isn't much in the way of user administration tools built in to Dataverse.
+Confirm Email
++++++++++++++
+
+Dataverse encourages builtin/local users to verify their email address upon signup or email change so that sysadmins can be assured that users can be contacted.
+
+The app will send a standard welcome email with a URL the user can click, which, when activated, will store a ``lastconfirmed`` timestamp in the ``authenticateduser`` table of the database. Any time this is "null" for a user (immediately after signup and/or changing of their Dataverse email address), their current email on file is considered to not be verified. The link that is sent expires after a time (the default is 24 hours), but this is configurable by a superuser via the ``:MinutesUntilConfirmEmailTokenExpires`` config option.
+
+Should users' URL token expire, they will see a "Verify Email" button on the account information page to send another URL.
+
+Sysadmins can determine which users have verified their email addresses by looking for the presence of the value ``emailLastConfirmed`` in the JSON output from listing users (see the "Admin" section of the :doc:`/api/native-api`). The email addresses for Shibboleth users are re-confirmed on every login.
+
Deleting an API Token
+++++++++++++++++++++
diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst
index 142e69ff3e3..2ebed65457f 100644
--- a/doc/sphinx-guides/source/installation/config.rst
+++ b/doc/sphinx-guides/source/installation/config.rst
@@ -90,7 +90,7 @@ Persistent identifiers are a required and integral part of the Dataverse platfor
JVM Options: :ref:`doi.baseurlstring`, :ref:`doi.username`, :ref:`doi.password`
-Database Settings: :ref:`:DoiProvider`, :ref:`:Protocol`, :ref:`:Authority`, :ref:`:DoiSeparator`
+Database Settings: :ref:`:DoiProvider <:DoiProvider>`, :ref:`:Protocol <:Protocol>`, :ref:`:Authority <:Authority>`, :ref:`:DoiSeparator <:DoiSeparator>`
Please note that any datasets creating using the test configuration cannot be directly migrated and would need to be created again once a valid DOI namespace is configured.
@@ -104,8 +104,9 @@ Once this configuration is complete, your Dataverse installation should be ready
JVM Options
-----------
-JVM stands Java Virtual Machine and as a Java application, Glassfish can read JVM options when it is started. A number of JVM options are configured by the installer below is a complete list of the Dataverse-specific JVM options. You can inspect the configured options by running ``asadmin list-jvm-options | egrep 'dataverse|doi'
-``.
+JVM stands for Java Virtual Machine and, as a Java application, Glassfish can read JVM options when it is started. A number of JVM options are configured by the installer. Below is a complete list of the Dataverse-specific JVM options. You can inspect the configured options by running:
+
+``asadmin list-jvm-options | egrep 'dataverse|doi'``
When changing values these values with ``asadmin``, you'll need to delete the old value before adding a new one, like this:
@@ -188,9 +189,11 @@ dataverse.dataAccess.thumbnail.pdf.limit
For limiting the size of thumbnail images generated from files.
+.. _doi.baseurlstring:
+
doi.baseurlstring
+++++++++++++++++
-.. _doi.baseurlstring:
+
As of this writing "https://ezid.cdlib.org" and "https://mds.datacite.org" are the only valid values. See also these related database settings below:
- :DoiProvider
@@ -198,14 +201,18 @@ As of this writing "https://ezid.cdlib.org" and "https://mds.datacite.org" are t
- :Authority
- :DoiSeparator
+.. _doi.username:
+
doi.username
++++++++++++
-.. _doi.username:
+
Used in conjuction with ``doi.baseurlstring``.
+.. _doi.password:
+
doi.password
++++++++++++
-.. _doi.password:
+
Used in conjuction with ``doi.baseurlstring``.
dataverse.handlenet.admcredfile
@@ -265,30 +272,45 @@ This is the email address that "system" emails are sent from such as password re
``curl -X PUT -d "Support " http://localhost:8080/api/admin/settings/:SystemEmail``
+:FooterCopyright
+++++++++++++++++
+
+By default the footer says "Copyright © [YYYY]" but you can add text after the year, as in the example below.
+
+``curl -X PUT -d ", The President & Fellows of Harvard College" http://localhost:8080/api/admin/settings/:FooterCopyright``
+
+.. _:DoiProvider:
+
:DoiProvider
++++++++++++
-.. _:DoiProvider:
+
As of this writing "EZID" and "DataCite" are the only valid options.
``curl -X PUT -d EZID http://localhost:8080/api/admin/settings/:DoiProvider``
+.. _:Protocol:
+
:Protocol
+++++++++
-.. _:Protocol:
+
As of this writing "doi" is the only valid option for the protocol for a persistent ID.
``curl -X PUT -d doi http://localhost:8080/api/admin/settings/:Protocol``
+.. _:Authority:
+
:Authority
++++++++++
-.. _:Authority:
+
Use the DOI authority assigned to you by your DoiProvider.
``curl -X PUT -d 10.xxxx http://localhost:8080/api/admin/settings/:Authority``
+.. _:DoiSeparator:
+
:DoiSeparator
+++++++++++++
-.. _:DoiSeparator:
+
It is recommended that you keep this as a slash ("/").
``curl -X PUT -d "/" http://localhost:8080/api/admin/settings/:DoiSeparator``
@@ -370,7 +392,9 @@ Limit the number of files in a zip that Dataverse will accept.
:GoogleAnalyticsCode
++++++++++++++++++++
-For setting up Google Analytics for your Dataverse installation.
+Set your Google Analytics Tracking ID like this:
+
+``curl -X PUT -d 'trackingID' http://localhost:8080/api/admin/settings/:GoogleAnalyticsCode``
:SolrHostColonPort
++++++++++++++++++
@@ -392,7 +416,7 @@ The relative path URL to which users will be sent after signup. The default sett
The location of your TwoRavens installation. Activation of TwoRavens also requires the setting below, ``TwoRavensTabularView``
:TwoRavensTabularView
-+++++++++++++++++++
++++++++++++++++++++++
Set ``TwoRavensTabularView`` to true to allow a user to view tabular files via the TwoRavens application. This boolean affects whether a user will see the "Explore" button.
@@ -445,6 +469,11 @@ Set ``SearchHighlightFragmentSize`` to override the default value of 100 from ht
Allow for migration of non-conformant data (especially dates) from DVN 3.x to Dataverse 4.
+:MinutesUntilConfirmEmailTokenExpires
++++++++++++++++++++++++++++++++++++++
+
+The duration in minutes before "Confirm Email" URLs expire. The default is 1440 minutes (24 hours). See also :doc:`/installation/administration`.
+
:ShibEnabled
++++++++++++
@@ -454,3 +483,21 @@ This setting is experimental per :doc:`/installation/shibboleth`.
++++++++++++
Set to false to disallow local accounts to be created if you are using :doc:`shibboleth` but not for production use until https://github.com/IQSS/dataverse/issues/2838 has been fixed.
+
+:PiwikAnalyticsId
+++++++++++++++++++++
+
+Site identifier created in your Piwik instance. Example:
+
+``curl -X PUT -d 42 http://localhost:8080/api/admin/settings/:PiwikAnalyticsId``
+
+:PiwikAnalyticsHost
+++++++++++++++++++++
+
+Host FQDN or URL of your Piwik instance before the ``/piwik.php``. Examples:
+
+``curl -X PUT -d stats.domain.tld http://localhost:8080/api/admin/settings/:PiwikAnalyticsHost``
+
+or
+
+``curl -X PUT -d hostname.domain.tld/stats http://localhost:8080/api/admin/settings/:PiwikAnalyticsHost``
diff --git a/doc/sphinx-guides/source/installation/index.rst b/doc/sphinx-guides/source/installation/index.rst
index ba2992c5ec4..b418370f908 100755
--- a/doc/sphinx-guides/source/installation/index.rst
+++ b/doc/sphinx-guides/source/installation/index.rst
@@ -9,15 +9,13 @@ Installation Guide
Contents:
.. toctree::
- :titlesonly:
- :maxdepth: 2
- intro
- prep
- prerequisites
- installation-main
- config
- administration
- upgrading
- r-rapache-tworavens
- shibboleth
+ intro
+ prep
+ prerequisites
+ installation-main
+ config
+ administration
+ upgrading
+ r-rapache-tworavens
+ shibboleth
diff --git a/doc/sphinx-guides/source/installation/installer-script.rst b/doc/sphinx-guides/source/installation/installer-script.rst
deleted file mode 100644
index 72881587917..00000000000
--- a/doc/sphinx-guides/source/installation/installer-script.rst
+++ /dev/null
@@ -1 +0,0 @@
-This content has been moved to :doc:`/installation/installation-main`.
diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst
index 9530860dce9..4a30c5b9a12 100644
--- a/doc/sphinx-guides/source/installation/prerequisites.rst
+++ b/doc/sphinx-guides/source/installation/prerequisites.rst
@@ -98,7 +98,7 @@ The standard init script that ships RHEL 6 and similar should work fine. Enable
Configuring Database Access for the Dataverse Application (and the Dataverse Installer)
-=====================================================================================
+=======================================================================================
- The application and the installer script will be connecting to PostgreSQL over TCP/IP, using password authentication. In this section we explain how to configure PostgreSQL to accept these connections.
diff --git a/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst b/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst
index 44cd29570b9..a88ffa114d2 100644
--- a/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst
+++ b/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst
@@ -5,8 +5,8 @@ R, rApache and TwoRavens
Eventually, this document may be split into several parts, dedicated to individual components -
such as R, rApache and the TwoRavens applications. Particularly, if the TwoRavens team creates an "official" distribution with their own installation manual.
-0. PREREQUISITS
-+++++++++++++++
+0. PREREQUISITES
+++++++++++++++++
a. httpd (Apache):
------------------
@@ -43,6 +43,13 @@ yum install R R-devel
(EPEL distribution recommended; version 3.* required; 3.1.* recommended as of writing this)
+To pick up any needed dependencies, CentOS users may simply install the epel-release RPM.
+
+RHEL users will want to log in to their organization's respective RHN interface, find the particular machine in question and:
+
+• click on "Subscribed Channels: Alter Channel Subscriptions"
+• enable EPEL, Server Extras, Server Optional
+
c. rApache:
-----------
@@ -57,19 +64,14 @@ If you are using RHEL/CentOS 7, you can `download an experimental rapache-1.2.7-
rpm -ivh rapache-1.2.7-rpm0.x86_64.rpm
-d. Install libcurl-devel:
--------------------------
-
-(provides /usr/bin/curl-config, needed by some 3rd-party R packages; package installation *will fail silently* if it's not found!):
-
-``yum install libcurl-devel``
+d. Install system dependencies:
+-------------------------------
-Make sure you have the standard GNU compilers installed (needed for 3rd-party R packages to build themselves).
+The r-setup.sh script launches child processes which log to RINSTALL.* files. Once the script exits, search these files for the word "error" and be sure to install any missing dependencies and run the script again. At present, at minimum it needs:
-**Update**: As of Aug. 4 2015, it appears the following rpms had to be installed:
+``yum install libcurl-devel openssl-devel libxml2-devel ed libX11-devel libpng-devel mesa-libGL-devel mesa-libGLU-devel libpqxx-devel``
-``yum install openssl-devel``
-``yum install xml2-devel``
+Make sure you have the standard GNU compilers installed (needed for 3rd-party R packages to build themselves). CentOS 6 users will need gcc-fortran 4.6 or greater, available from the CentOS devtools repo.
Again, without these rpms, R package devtools was failing to install, silently or with a non-informative error message.
Note: this package ``devtools`` has proven to be very flaky; it is being very actively maintained, new dependencies are being constantly added and new bugs introduced... however, it is only needed to install the package ``Zelig``, the main R workhorse behind TwoRavens. It cannot be installed from CRAN, like all the other 3rd party packages we use - becase TwoRavens requires version 5, which is still in beta. So devtools is needed to build it from sources downloaded directly from github. Once Zelig 5 is released, we'll be able to drop the requirement for devtools - and that will make this process much simpler. For now, be prepared for it to be somewhat of an adventure.
@@ -82,7 +84,7 @@ R is used both by the Dataverse application, directly, and the TwoRavens compani
Two distinct interfaces are used to access R: Dataverse uses Rserve; and TwoRavens sends jobs to R running under rApache using Rook interface.
-We provide a shell script (``conf/R/r-setup.sh`` in the Dataverse source tree; you will need the other 3 files in that directory as well - `https://github.com/IQSS/dataverse/conf/R/ `__) that will attempt to install the required 3rd party packages; it will also configure Rserve and rserve user. rApache configuration will be addressed in its own section.
+We provide a shell script (``conf/R/r-setup.sh`` in the Dataverse source tree; you will need the other 3 files in that directory as well - `https://github.com/IQSS/dataverse/tree/master/conf/R `__) that will attempt to install the required 3rd party packages; it will also configure Rserve and rserve user. rApache configuration will be addressed in its own section.
The script will attempt to download the packages from CRAN (or a mirror) and GitHub, so the system must have access to the internet. On a server fully firewalled from the world, packages can be installed from downloaded sources. This is left as an exercise for the reader. Consult the script for insight.
@@ -192,7 +194,7 @@ Note that some of these packages have their own dependencies, and additional ins
install.pl script:
++++++++++++++++++
-I. Configure the TwoRavens web (Javascript) application.
+I. Configure the TwoRavens web (Javascript) application
-------------------------------------------------------
Edit the file ``/var/www/html/dataexplore/app_ddi.js``.
diff --git a/doc/sphinx-guides/source/user/data-exploration/index.rst b/doc/sphinx-guides/source/user/data-exploration/index.rst
index b6be872249c..708f774bb46 100755
--- a/doc/sphinx-guides/source/user/data-exploration/index.rst
+++ b/doc/sphinx-guides/source/user/data-exploration/index.rst
@@ -9,10 +9,6 @@ Data Exploration Guide
Contents:
.. toctree::
- :titlesonly:
- :maxdepth: 2
-
- tworavens
- worldmap
-
+ tworavens
+ worldmap
diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst
index 0a6200583e5..d0259286af9 100755
--- a/doc/sphinx-guides/source/user/dataset-management.rst
+++ b/doc/sphinx-guides/source/user/dataset-management.rst
@@ -89,8 +89,8 @@ For example, if these files were included within a .zip, the “Map Data” butt
* subway_line.dbf
Once you publish your dataset with your shape files, you will be able to use the "Map Data" button using `GeoConnect `_ to visualize and manipulate these files
-for users to Explore this geospatial data using the `WorldMap `_ interface.
-Please note: In order to map your data file, a copy will be sent to Harvard's `WorldMap `_ platform. You have the ability to delete any maps, and associated data, from the Harvard WorldMap platform, at any time.
+for users to Explore this geospatial data using the `WorldMap `__ interface.
+Please note: In order to map your data file, a copy will be sent to Harvard's `WorldMap `__ platform. You have the ability to delete any maps, and associated data, from the Harvard WorldMap platform, at any time.
Astronomy (FITS)
--------------------
@@ -222,7 +222,7 @@ The file permissions page has two sections: Users/Groups and Files.
To give someone access to your restricted files, click on the Grant Access to Users/Groups button in the Users/Groups section.
-.. _widgets:
+.. _dataset-widgets:
Widgets
=============================
@@ -310,7 +310,8 @@ a file, your dataset will automatically be bumped up to a major version (example
|image3|
-**Dataset Versions Tab**
+Version Details
+-------------------------------------
To view what has exactly changed starting from the originally published version to any subsequent published versions: click on the Versions tab on the dataset page to see all versions and changes made for that particular dataset. Once you have more than one version (can be version 1 and a draft), you can click the Show Details link in the Versions tab to learn more about the metadata fields and files that were either added or edited.
@@ -334,6 +335,6 @@ If you deaccession the most recently published version of the dataset but not al
:class: img-responsive
.. |image2| image:: ./img/data-download.png
:class: img-responsive
-.. |image3| image:: http://static.projects.iq.harvard.edu/files/styles/os_files_xxlarge/public/datascience/files/data_publishing_version_workflow.png?itok=8Z0PM-QC
+.. |image3| image:: ./img/data_publishing_version_workflow.png
:class: img-responsive
diff --git a/doc/sphinx-guides/source/user/dataverse-management.rst b/doc/sphinx-guides/source/user/dataverse-management.rst
index 6d45a055ecf..bac42305119 100755
--- a/doc/sphinx-guides/source/user/dataverse-management.rst
+++ b/doc/sphinx-guides/source/user/dataverse-management.rst
@@ -35,12 +35,13 @@ Edit Dataverse
To edit your dataverse, navigate to your dataverse homepage and select the "Edit Dataverse" button,
where you will be presented with the following editing options:
-- :ref:`General Information ` : edit name, identifier, category, contact email, affiliation, description, Metadata Elements, and facets for your dataverse.
-- :ref:`Theme + Widgets ` : upload a logo for your dataverse, add a link to your department or personal website, and select colors for your dataverse in order to brand it. Also, you can get code to add to your website to have your dataverse display on it.
-- :ref:`Permissions ` : give Dataverse users permissions to your dataverse, i.e.-can edit datasets, and see which users already have which permissions for your dataverse
-- :ref:`Dataset Templates ` : these are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in
-- :ref:`Dataset Guestbooks ` : allows you to collect data about who is downloading the files from your datasets
-- :ref:`Featured Dataverses ` : if you have one or more dataverses, you can use this option to show them at the top of your dataverse page to help others easily find interesting or important dataverses
+- :ref:`General Information `: edit name, identifier, category, contact email, affiliation, description, Metadata Elements, and facets for your dataverse
+- :ref:`Theme `: upload a logo for your dataverse, add a link to your department or personal website, and select colors for your dataverse in order to brand it
+- :ref:`Widgets `: get code to add to your website to have your dataverse display on it
+- :ref:`Permissions `: give Dataverse users permissions to your dataverse, i.e.-can edit datasets, and see which users already have which permissions for your dataverse
+- :ref:`Dataset Templates `: these are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in
+- :ref:`Dataset Guestbooks `: allows you to collect data about who is downloading the files from your datasets
+- :ref:`Featured Dataverses `: if you have one or more dataverses, you can use this option to show them at the top of your dataverse page to help others easily find interesting or important dataverses
- **Delete Dataverse**: you are able to delete your dataverse as long as it is not published and does not have any draft datasets
.. _general-information:
@@ -52,14 +53,14 @@ The General Information page is how you edit the information you filled in while
Tip: The metadata fields you select as required, will appear on the Create Dataset form when someone goes to add a dataset to the dataverse.
-.. _widgets:
+.. _theme:
Theme
====================================================
The Theme feature provides you with a way to customize the look of your dataverse. You can decide either to use the customization from the dataverse above yours or upload your own image file. Supported image types are JPEG, TIFF, or PNG and should be no larger than 500 KB. The maximum display size for an image file in a dataverse's theme is 940 pixels wide by 120 pixels high. Additionally, you can select the colors for the header of your dataverse and the text that appears in your dataverse. You can also add a link to your personal website, the website for your organization or institution, your department, journal, etc.
-.. _widgets:
+.. _dataverse-widgets:
Widgets
=================================================
diff --git a/doc/sphinx-guides/source/user/img/data_publishing_version_workflow.png b/doc/sphinx-guides/source/user/img/data_publishing_version_workflow.png
new file mode 100644
index 00000000000..6ef11f31750
Binary files /dev/null and b/doc/sphinx-guides/source/user/img/data_publishing_version_workflow.png differ
diff --git a/doc/sphinx-guides/source/user/index.rst b/doc/sphinx-guides/source/user/index.rst
index 604ad4c2071..9d231cb5f6d 100755
--- a/doc/sphinx-guides/source/user/index.rst
+++ b/doc/sphinx-guides/source/user/index.rst
@@ -9,12 +9,11 @@ User Guide
Contents:
.. toctree::
- :maxdepth: 2
- account
- find-use-data
- dataverse-management
- dataset-management
- tabulardataingest/index
- data-exploration/index
- appendix
+ account
+ find-use-data
+ dataverse-management
+ dataset-management
+ tabulardataingest/index
+ data-exploration/index
+ appendix
diff --git a/doc/sphinx-guides/source/user/super-user.rst b/doc/sphinx-guides/source/user/super-user.rst
deleted file mode 100755
index 3586d0ea827..00000000000
--- a/doc/sphinx-guides/source/user/super-user.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Super User
-+++++++++++++++++++++++
-
-[Note: Documentation to be added about features available for super admins of
-the Dataverse, which provides several options for configuring and
-customizing your application.]
diff --git a/doc/sphinx-guides/source/user/tabulardataingest/index.rst b/doc/sphinx-guides/source/user/tabulardataingest/index.rst
index 0ca316502e7..a190710bdab 100755
--- a/doc/sphinx-guides/source/user/tabulardataingest/index.rst
+++ b/doc/sphinx-guides/source/user/tabulardataingest/index.rst
@@ -9,15 +9,11 @@ Tabular Data File Ingest
Contents:
.. toctree::
- :titlesonly:
- :maxdepth: 2
-
- supportedformats
- ingestprocess
- spss
- stata
- rdata
- excel
- csv
-
+ supportedformats
+ ingestprocess
+ spss
+ stata
+ rdata
+ excel
+ csv
diff --git a/doc/sphinx-guides/source/user/tabulardataingest/rdata.rst b/doc/sphinx-guides/source/user/tabulardataingest/rdata.rst
index b5d9311d603..ae2cc6cf7fe 100644
--- a/doc/sphinx-guides/source/user/tabulardataingest/rdata.rst
+++ b/doc/sphinx-guides/source/user/tabulardataingest/rdata.rst
@@ -92,10 +92,10 @@ the latter reserved for longer, descriptive text.
With variables ingested from R data frames the variable name will be
used for both the "name" and the "label".
-| *Optional R packages exist for providing descriptive variable labels;
- in one of the future versions support may be added for such a
- mechanism. It would of course work only for R files that were
- created with such optional packages*.
+*Optional R packages exist for providing descriptive variable labels;
+in one of the future versions support may be added for such a
+mechanism. It would of course work only for R files that were
+created with such optional packages*.
Similarly, R categorical values (factors) lack descriptive labels too.
**Note:** This is potentially confusing, since R factors do
@@ -132,7 +132,7 @@ value: unless the time zone was explicitly defined, R will adjust the
value to the current time zone. The resulting behavior is often
counter-intuitive: if you create a time value, for example:
- timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS");
+``timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS");``
on a computer configured for the San Francisco time zone, the value
will be differently displayed on computers in different time zones;
@@ -143,9 +143,11 @@ If it is important that the values are always displayed the same way,
regardless of the current time zones, it is recommended that the time
zone is explicitly defined. For example:
- attr(timevalue,"tzone")<-"PST"
+``attr(timevalue,"tzone")<-"PST"``
+
or
- timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS", tz="PST");
+
+``timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS", tz="PST");``
Now the value will always be displayed as "15:57 PST", regardless of
the time zone that is current for the OS ... **BUT ONLY** if the OS
@@ -189,7 +191,7 @@ wasn't defined explicitly, it implicitly becomes a time value in the
"UTC" zone!), this means that it is **impossible** to have 2 time
value vectors, in Stata/SPSS and R, that produce the same UNF.
-| **A pro tip:** if it is important to produce SPSS/Stata and R versions of
+**A pro tip:** if it is important to produce SPSS/Stata and R versions of
the same data set that result in the same UNF when ingested, you may
define the time variables as **strings** in the R data frame, and use
the "YYYY-MM-DD HH:mm:ss" formatting notation. This is the formatting used by the UNF
@@ -198,4 +200,4 @@ the same UNF as the vector of the same time values in Stata.
Note: date values (dates only, without time) should be handled the
exact same way as those in SPSS and Stata, and should produce the same
-UNFs.
\ No newline at end of file
+UNFs.
diff --git a/doc/sphinx-guides/source/user/tabulardataingest/stata.rst b/doc/sphinx-guides/source/user/tabulardataingest/stata.rst
new file mode 100644
index 00000000000..764bc815a2f
--- /dev/null
+++ b/doc/sphinx-guides/source/user/tabulardataingest/stata.rst
@@ -0,0 +1,9 @@
+Stata
+++++++++
+
+Of all the third party statistical software providers, Stata does the best job at documenting the internal format of their files, by far. And at making that documentation freely and easily available to developers (yes, we are looking at you, SPSS). Because of that, Stata is the best supported format for tabular data ingest.
+
+
+**New in Dataverse 4.0:** support for Stata v.13 has been added.
+
+
diff --git a/doc/sphinx_bootstrap_theme/bootstrap/layout.html b/doc/sphinx_bootstrap_theme/bootstrap/layout.html
index 3478e807b30..2193b142bb9 100755
--- a/doc/sphinx_bootstrap_theme/bootstrap/layout.html
+++ b/doc/sphinx_bootstrap_theme/bootstrap/layout.html
@@ -133,7 +133,7 @@
{%- if hasdoc('copyright') %}
{% trans path=pathto('copyright'), copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
{%- else %}
- {% trans copyright=copyright|e %}Data Science at The Institute for Quantitative Social Science | Code available at | Created using Sphinx {{ sphinx_version }}
Version {{ version }} | Last updated on {{ last_updated }}
© Copyright {{ copyright }} {% endtrans %}
+ {% trans copyright=copyright|e %}Data Science at The Institute for Quantitative Social Science | Code available at | Created using Sphinx {{ sphinx_version }}
Version {{ version }} | Last updated on {{ last_updated }}
© Copyright {{ copyright }} {% endtrans %}
{%- endif %}
{%- endif %}
diff --git a/doc/sphinx_bootstrap_theme/bootstrap/static/images/githubicon.png b/doc/sphinx_bootstrap_theme/bootstrap/static/images/githubicon.png
index 468574c26a9..65581c832f1 100644
Binary files a/doc/sphinx_bootstrap_theme/bootstrap/static/images/githubicon.png and b/doc/sphinx_bootstrap_theme/bootstrap/static/images/githubicon.png differ
diff --git a/pom.xml b/pom.xml
index 56edef4f9d8..7812b866812 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
edu.harvard.iq
dataverse
- 4.5
+ 4.5.1
war
dataverse
@@ -41,7 +41,7 @@
dataone.org
- http://dev-testing.dataone.org/maven
+ http://maven.dataone.org
true
diff --git a/scripts/api/data/ipGroup-all-ipv4.json b/scripts/api/data/ipGroup-all-ipv4.json
new file mode 100644
index 00000000000..c5ff32def44
--- /dev/null
+++ b/scripts/api/data/ipGroup-all-ipv4.json
@@ -0,0 +1,5 @@
+{
+ "alias":"all-ipv4",
+ "name":"IP group to match all IPv4 addresses",
+ "ranges" : [["0.0.0.0", "255.255.255.255"]]
+}
diff --git a/scripts/api/data/ipGroup3.json b/scripts/api/data/ipGroup-all.json
similarity index 100%
rename from scripts/api/data/ipGroup3.json
rename to scripts/api/data/ipGroup-all.json
diff --git a/scripts/api/data/ipGroup-localhost.json b/scripts/api/data/ipGroup-localhost.json
index 4a5f7facfef..4f8d2f708b2 100644
--- a/scripts/api/data/ipGroup-localhost.json
+++ b/scripts/api/data/ipGroup-localhost.json
@@ -1,6 +1,5 @@
{
"alias":"localhost",
"name":"Localhost connections",
- "ranges" : [["127.0.0.1", "127.0.0.1"],
- ["::1", "::1"]]
+ "addresses": [ "::1", "127.0.0.1" ]
}
diff --git a/scripts/api/data/ipGroup-single-IPv4.json b/scripts/api/data/ipGroup-single-IPv4.json
new file mode 100644
index 00000000000..515c512bcd1
--- /dev/null
+++ b/scripts/api/data/ipGroup-single-IPv4.json
@@ -0,0 +1,5 @@
+{
+ "alias":"singleIPv4",
+ "name":"Single IPv4",
+ "addresses" : ["128.0.0.7"]
+}
diff --git a/scripts/api/data/ipGroup-single-IPv6.json b/scripts/api/data/ipGroup-single-IPv6.json
new file mode 100644
index 00000000000..73eaa8e60a1
--- /dev/null
+++ b/scripts/api/data/ipGroup-single-IPv6.json
@@ -0,0 +1,5 @@
+{
+ "alias":"singleIPv6",
+ "name":"Single IPv6",
+ "addresses" : ["aa:bb:cc:dd:ee:ff::1"]
+}
diff --git a/scripts/api/data/ipGroupDuplicate-v1.json b/scripts/api/data/ipGroupDuplicate-v1.json
new file mode 100644
index 00000000000..eda0c8eb49b
--- /dev/null
+++ b/scripts/api/data/ipGroupDuplicate-v1.json
@@ -0,0 +1,7 @@
+{
+ "alias":"ipGroup-dup",
+ "name":"IP Group with duplicate files (1)",
+ "description":"This is the FIRST version of the group",
+ "ranges" : [["60.0.0.0", "60.0.0.255"],
+ ["60::1", "60::ffff"]]
+}
diff --git a/scripts/api/data/ipGroupDuplicate-v2.json b/scripts/api/data/ipGroupDuplicate-v2.json
new file mode 100644
index 00000000000..8db88e97fe7
--- /dev/null
+++ b/scripts/api/data/ipGroupDuplicate-v2.json
@@ -0,0 +1,7 @@
+{
+ "alias":"ipGroup-dup",
+ "name":"IP Group with duplicate files-v2",
+ "description":"This is the second version of the group",
+ "ranges" : [["70.0.0.0", "70.0.0.255"],
+ ["70::1", "70::ffff"]]
+}
diff --git a/scripts/api/setup-optional-harvard.sh b/scripts/api/setup-optional-harvard.sh
index 52caa31c1e0..3433e823014 100755
--- a/scripts/api/setup-optional-harvard.sh
+++ b/scripts/api/setup-optional-harvard.sh
@@ -22,6 +22,7 @@ curl -s -X PUT -d true "$SERVER/admin/settings/:GeoconnectCreateEditMaps"
curl -s -X PUT -d true "$SERVER/admin/settings/:GeoconnectViewMaps"
echo "- Setting system email"
curl -X PUT -d "Dataverse Support " http://localhost:8080/api/admin/settings/:SystemEmail
+curl -X PUT -d ", The President & Fellows of Harvard College" http://localhost:8080/api/admin/settings/:FooterCopyright
echo "- Setting up the Harvard Shibboleth institutional group"
curl -s -X POST -H 'Content-type:application/json' --upload-file data/shibGroupHarvard.json "$SERVER/admin/groups/shib?key=$adminKey"
echo
diff --git a/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql b/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql
new file mode 100644
index 00000000000..6296fca8a5f
--- /dev/null
+++ b/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql
@@ -0,0 +1 @@
+ALTER TABLE authenticateduser ADD COLUMN emailconfirmed timestamp without time zone;
diff --git a/scripts/deploy/apitest.dataverse.org/deploy b/scripts/deploy/apitest.dataverse.org/deploy
deleted file mode 100755
index bf27864daaf..00000000000
--- a/scripts/deploy/apitest.dataverse.org/deploy
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-scripts/deploy/apitest.dataverse.org/prep
-sudo /home/jenkins/dataverse/scripts/deploy/apitest.dataverse.org/rebuild
-scripts/deploy/apitest.dataverse.org/post
diff --git a/scripts/deploy/apitest.dataverse.org/dv-root.json b/scripts/deploy/apitest.dataverse.org/dv-root.json
deleted file mode 100644
index 8d0b4ecab19..00000000000
--- a/scripts/deploy/apitest.dataverse.org/dv-root.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
- "alias": "root",
- "name": "API Test",
- "permissionRoot": false,
- "facetRoot": true,
- "description": "Welcome! This is a playground for Dataverse API users. (Data will be deleted periodically.) Please see http://guides.dataverse.org/en/latest/api to get started and http://community.dataverse.org/community-groups/api.html to join the community!",
- "dataverseSubjects": [
- "Other"
- ],
- "dataverseContacts": [
- {
- "contactEmail": "root@mailinator.com"
-}
- ]
-}
diff --git a/scripts/deploy/apitest.dataverse.org/post b/scripts/deploy/apitest.dataverse.org/post
deleted file mode 100755
index 9b29be3a408..00000000000
--- a/scripts/deploy/apitest.dataverse.org/post
+++ /dev/null
@@ -1,13 +0,0 @@
-#/bin/sh
-cd scripts/api
-./setup-all.sh | tee /tmp/setup-all.sh.out
-cd ../..
-psql -U dvnapp dvndb -f scripts/database/reference_data.sql
-scripts/search/tests/publish-dataverse-root
-git checkout scripts/api/data/dv-root.json
-scripts/search/tests/grant-authusers-add-on-root
-scripts/search/populate-users
-scripts/search/create-users
-scripts/search/tests/create-all-and-test
-scripts/search/tests/publish-spruce1-and-test
-java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /usr/local/glassfish4/glassfish/domains/domain1/docroot/guides/developers/database/schemaspy
diff --git a/scripts/deploy/apitest.dataverse.org/prep b/scripts/deploy/apitest.dataverse.org/prep
deleted file mode 100755
index b7c181357b2..00000000000
--- a/scripts/deploy/apitest.dataverse.org/prep
+++ /dev/null
@@ -1,2 +0,0 @@
-#/bin/bash -x
-cp scripts/deploy/apitest.dataverse.org/dv-root.json scripts/api/data/dv-root.json
diff --git a/scripts/deploy/apitest.dataverse.org/rebuild b/scripts/deploy/apitest.dataverse.org/rebuild
deleted file mode 100755
index aca38a56ab4..00000000000
--- a/scripts/deploy/apitest.dataverse.org/rebuild
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-/usr/local/glassfish4/glassfish/bin/asadmin undeploy dataverse-4.0
-/usr/local/glassfish4/glassfish/bin/asadmin stop-domain
-rm -rf /usr/local/glassfish4/glassfish/domains/domain1/files
-psql -U dvnapp -c 'DROP DATABASE "dvndb"' template1
-echo $?
-curl http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}"
-psql -U dvnapp -c 'CREATE DATABASE "dvndb" WITH OWNER = "dvnapp"' template1
-echo $?
-/usr/local/glassfish4/glassfish/bin/asadmin start-domain
-/usr/local/glassfish4/glassfish/bin/asadmin deploy /tmp/dataverse-4.0.war
diff --git a/scripts/deploy/apitest.dataverse.org/cert.md b/scripts/deploy/phoenix.dataverse.org/cert.md
similarity index 67%
rename from scripts/deploy/apitest.dataverse.org/cert.md
rename to scripts/deploy/phoenix.dataverse.org/cert.md
index 3b8084825d5..d68910fa15c 100644
--- a/scripts/deploy/apitest.dataverse.org/cert.md
+++ b/scripts/deploy/phoenix.dataverse.org/cert.md
@@ -1,13 +1,13 @@
Note that `-sha256` is used but the important thing is making sure SHA-1 is not selected when uploading the CSR to https://cert-manager.com/customer/InCommon
- openssl genrsa -out apitest.dataverse.org.key 2048
+ openssl genrsa -out phoenix.dataverse.org.key 2048
- openssl req -new -sha256 -key apitest.dataverse.org.key -out apitest.dataverse.org.csr
+ openssl req -new -sha256 -key phoenix.dataverse.org.key -out phoenix.dataverse.org.csr
Country Name (2 letter code) [XX]:US
State or Province Name (full name) []:Massachusetts
Locality Name (eg, city) [Default City]:Cambridge
Organization Name (eg, company) [Default Company Ltd]:Harvard College
Organizational Unit Name (eg, section) []:IQSS
- Common Name (eg, your name or your server's hostname) []:apitest.dataverse.org
+ Common Name (eg, your name or your server's hostname) []:phoenix.dataverse.org
Email Address []:support@dataverse.org
diff --git a/scripts/deploy/phoenix.dataverse.org/rebuild b/scripts/deploy/phoenix.dataverse.org/rebuild
index b59a46f4466..ca92ef59b9e 100755
--- a/scripts/deploy/phoenix.dataverse.org/rebuild
+++ b/scripts/deploy/phoenix.dataverse.org/rebuild
@@ -5,6 +5,8 @@ OLD_WAR=$(echo $LIST_APP | awk '{print $1}')
NEW_WAR=/tmp/dataverse.war
/usr/local/glassfish4/glassfish/bin/asadmin undeploy $OLD_WAR
/usr/local/glassfish4/glassfish/bin/asadmin stop-domain
+# blow away "generated" directory to avoid EJB Timer Service is not available" https://github.com/IQSS/dataverse/issues/3336
+rm -rf /usr/local/glassfish4/glassfish/domains/domain1/generated
rm -rf /usr/local/glassfish4/glassfish/domains/domain1/files
#psql -U postgres -c "CREATE ROLE dvnapp UNENCRYPTED PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN" template1
psql -U dvnapp -c 'DROP DATABASE "dvndb"' template1
diff --git a/scripts/issues/1380/01-add.localhost.sh b/scripts/issues/1380/01-add.localhost.sh
new file mode 100755
index 00000000000..331011d5fa2
--- /dev/null
+++ b/scripts/issues/1380/01-add.localhost.sh
@@ -0,0 +1,2 @@
+# Add the localhost group to the system.
+curl -X POST -H"Content-Type:application/json" -d@../../api/data/ipGroup-localhost.json localhost:8080/api/admin/groups/ip
diff --git a/scripts/issues/1380/02-build-dv-structure.sh b/scripts/issues/1380/02-build-dv-structure.sh
new file mode 100755
index 00000000000..f0936e3cf69
--- /dev/null
+++ b/scripts/issues/1380/02-build-dv-structure.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+echo Run this after running setup-users.sh, and making Pete an
+echo admin on the root dataverse.
+
+
+PETE=$(grep :result: users.out | grep Pete | cut -f4 -d: | tr -d \ )
+UMA=$(grep :result: users.out | grep Uma | cut -f4 -d: | tr -d \ )
+
+pushd ../../api
+./setup-dvs.sh $PETE $UMA
+popd
diff --git a/scripts/issues/1380/add-ip-group.sh b/scripts/issues/1380/add-ip-group.sh
new file mode 100755
index 00000000000..2fba944807c
--- /dev/null
+++ b/scripts/issues/1380/add-ip-group.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+# Add the passed group to the system.
+curl -X POST -H"Content-Type:application/json" -d@../../api/data/$1 localhost:8080/api/admin/groups/ip
diff --git a/scripts/issues/1380/add-user b/scripts/issues/1380/add-user
new file mode 100755
index 00000000000..1781181bb79
--- /dev/null
+++ b/scripts/issues/1380/add-user
@@ -0,0 +1,3 @@
+#!/bin/bash
+# add-user dv group user api-token
+curl -H "Content-type:application/json" -X POST -d"[$3]" localhost:8080/api/dataverses/$1/groups/$2/roleAssignees?key=$4
diff --git a/scripts/issues/1380/data/3-eg1.json b/scripts/issues/1380/data/3-eg1.json
new file mode 100644
index 00000000000..a874d69a2e8
--- /dev/null
+++ b/scripts/issues/1380/data/3-eg1.json
@@ -0,0 +1 @@
+["&explicit/3-eg1"]
diff --git a/scripts/issues/1380/data/guest.json b/scripts/issues/1380/data/guest.json
new file mode 100644
index 00000000000..3e4188a7167
--- /dev/null
+++ b/scripts/issues/1380/data/guest.json
@@ -0,0 +1 @@
+[":guest"]
diff --git a/scripts/issues/1380/data/locals.json b/scripts/issues/1380/data/locals.json
new file mode 100644
index 00000000000..8bb5e3e4162
--- /dev/null
+++ b/scripts/issues/1380/data/locals.json
@@ -0,0 +1 @@
+["&ip/localhost"]
diff --git a/scripts/issues/1380/data/pete.json b/scripts/issues/1380/data/pete.json
new file mode 100644
index 00000000000..298e813d2bc
--- /dev/null
+++ b/scripts/issues/1380/data/pete.json
@@ -0,0 +1 @@
+["@pete"]
diff --git a/scripts/issues/1380/data/uma.json b/scripts/issues/1380/data/uma.json
new file mode 100644
index 00000000000..3caf8c5c9cc
--- /dev/null
+++ b/scripts/issues/1380/data/uma.json
@@ -0,0 +1 @@
+["@uma"]
diff --git a/scripts/issues/1380/db-list-dvs b/scripts/issues/1380/db-list-dvs
new file mode 100755
index 00000000000..4161f7fdd03
--- /dev/null
+++ b/scripts/issues/1380/db-list-dvs
@@ -0,0 +1 @@
+psql dvndb -c "select dvobject.id, name, alias, owner_id from dvobject inner join dataverse on dvobject.id = dataverse.id"
diff --git a/scripts/issues/1380/delete-ip-group b/scripts/issues/1380/delete-ip-group
new file mode 100755
index 00000000000..b6138d95024
--- /dev/null
+++ b/scripts/issues/1380/delete-ip-group
@@ -0,0 +1,9 @@
+#!/bin/bash
+if [ $# -eq 0 ]
+ then
+ echo "Please provide IP group id"
+  echo "e.g. $0 845"
+ exit 1
+fi
+
+curl -X DELETE http://localhost:8080/api/admin/groups/ip/$1
diff --git a/scripts/issues/1380/dvs.gv b/scripts/issues/1380/dvs.gv
new file mode 100644
index 00000000000..526066000a2
--- /dev/null
+++ b/scripts/issues/1380/dvs.gv
@@ -0,0 +1,19 @@
+digraph {
+d1[label="Root"]
+d2[label="Top dataverse of Pete"]
+d3[label="Pete's public place"]
+d4[label="Pete's restricted data"]
+d5[label="Pete's secrets"]
+d6[label="Top dataverse of Uma"]
+d7[label="Uma's first"]
+d8[label="Uma's restricted"]
+
+d1 -> d2
+d2 -> d3
+d2 -> d4
+d2 -> d5
+d1 -> d6
+d6 -> d7
+d6 -> d8
+
+}
diff --git a/scripts/issues/1380/dvs.pdf b/scripts/issues/1380/dvs.pdf
new file mode 100644
index 00000000000..5169f449420
Binary files /dev/null and b/scripts/issues/1380/dvs.pdf differ
diff --git a/scripts/issues/1380/explicitGroup1.json b/scripts/issues/1380/explicitGroup1.json
new file mode 100644
index 00000000000..337a0b62dcb
--- /dev/null
+++ b/scripts/issues/1380/explicitGroup1.json
@@ -0,0 +1,5 @@
+{
+ "description":"Sample Explicit Group",
+ "displayName":"Close Collaborators",
+ "aliasInOwner":"eg1"
+}
diff --git a/scripts/issues/1380/explicitGroup2.json b/scripts/issues/1380/explicitGroup2.json
new file mode 100644
index 00000000000..fbac263665c
--- /dev/null
+++ b/scripts/issues/1380/explicitGroup2.json
@@ -0,0 +1,5 @@
+{
+ "description":"Sample Explicit Group",
+ "displayName":"Not-So-Close Collaborators",
+ "aliasInOwner":"eg2"
+}
diff --git a/scripts/issues/1380/keys.txt b/scripts/issues/1380/keys.txt
new file mode 100644
index 00000000000..9dc47d356c1
--- /dev/null
+++ b/scripts/issues/1380/keys.txt
@@ -0,0 +1,3 @@
+Keys for P e t e and U m a. Produced by running setup-all.sh from the /scripts/api folder.
+Pete:757a6493-456a-4bf0-943e-9b559d551a3f
+Uma:8797f19b-b8aa-4f96-a789-1b99506f2eab
diff --git a/scripts/issues/1380/list-groups-for b/scripts/issues/1380/list-groups-for
new file mode 100755
index 00000000000..063b92c9b6a
--- /dev/null
+++ b/scripts/issues/1380/list-groups-for
@@ -0,0 +1,2 @@
+#!/bin/bash
+curl -s -X GET http://localhost:8080/api/test/explicitGroups/$1 | jq .
diff --git a/scripts/issues/1380/list-ip-groups.sh b/scripts/issues/1380/list-ip-groups.sh
new file mode 100755
index 00000000000..fba29cced4e
--- /dev/null
+++ b/scripts/issues/1380/list-ip-groups.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+curl -X GET http://localhost:8080/api/admin/groups/ip | jq .
diff --git a/scripts/issues/1380/truth-table.numbers b/scripts/issues/1380/truth-table.numbers
new file mode 100644
index 00000000000..86f67386fbb
Binary files /dev/null and b/scripts/issues/1380/truth-table.numbers differ
diff --git a/scripts/issues/1380/users.out b/scripts/issues/1380/users.out
new file mode 100644
index 00000000000..337b9e2ce01
--- /dev/null
+++ b/scripts/issues/1380/users.out
@@ -0,0 +1,6 @@
+{"status":"OK","data":{"user":{"id":4,"firstName":"Gabbi","lastName":"Guest","userName":"gabbi","affiliation":"low","position":"A Guest","email":"gabbi@malinator.com"},"authenticatedUser":{"id":4,"identifier":"@gabbi","displayName":"Gabbi Guest","firstName":"Gabbi","lastName":"Guest","email":"gabbi@malinator.com","superuser":false,"affiliation":"low","position":"A Guest","persistentUserId":"gabbi","authenticationProviderId":"builtin"},"apiToken":"d1940786-c315-491e-9812-a8ff809289cc"}}
+{"status":"OK","data":{"user":{"id":5,"firstName":"Cathy","lastName":"Collaborator","userName":"cathy","affiliation":"mid","position":"Data Scientist","email":"cathy@malinator.com"},"authenticatedUser":{"id":5,"identifier":"@cathy","displayName":"Cathy Collaborator","firstName":"Cathy","lastName":"Collaborator","email":"cathy@malinator.com","superuser":false,"affiliation":"mid","position":"Data Scientist","persistentUserId":"cathy","authenticationProviderId":"builtin"},"apiToken":"0ddfcb1e-fb51-4ce7-88ab-308b23e13e9a"}}
+{"status":"OK","data":{"user":{"id":6,"firstName":"Nick","lastName":"NSA","userName":"nick","affiliation":"gov","position":"Signals Intelligence","email":"nick@malinator.com"},"authenticatedUser":{"id":6,"identifier":"@nick","displayName":"Nick NSA","firstName":"Nick","lastName":"NSA","email":"nick@malinator.com","superuser":false,"affiliation":"gov","position":"Signals Intelligence","persistentUserId":"nick","authenticationProviderId":"builtin"},"apiToken":"6d74745d-1733-459a-ae29-422110056ec0"}}
+reporting API keys
+:result: Pete's key is: 757a6493-456a-4bf0-943e-9b559d551a3f
+:result: Uma's key is: 8797f19b-b8aa-4f96-a789-1b99506f2eab
\ No newline at end of file
diff --git a/scripts/issues/2438/download.R b/scripts/issues/2438/download.R
index 2d31ed0865b..eea7f185137 100644
--- a/scripts/issues/2438/download.R
+++ b/scripts/issues/2438/download.R
@@ -13,7 +13,7 @@ download.dataverse.file <- function(url) {
# look up the id of the file. As of this writing the easiest way is via SWORD:
# https://github.com/IQSS/dataverse/issues/1837#issuecomment-121736332
#
- # url.to.download = 'https://apitest.dataverse.org/api/v1/access/datafile/91'
+ # url.to.download = 'https://demo.dataverse.org/api/v1/access/datafile/91'
url.to.download = url
tsvfile = 'file.tsv'
download.file(url = url.to.download, destfile =
@@ -23,4 +23,4 @@ download.dataverse.file <- function(url) {
unlink(tsvfile)
}
-download.dataverse.file(arg)
\ No newline at end of file
+download.dataverse.file(arg)
diff --git a/scripts/search/tests/ipgroup-add b/scripts/search/tests/ipgroup-add
index 8033b277258..d41679fd188 100755
--- a/scripts/search/tests/ipgroup-add
+++ b/scripts/search/tests/ipgroup-add
@@ -1,5 +1,5 @@
#!/bin/sh
. scripts/search/export-keys
-OUTPUT=`curl -s -X POST -d @scripts/api/data/ipGroup3.json http://localhost:8080/api/admin/groups/ip -H "Content-type:application/json"`
+OUTPUT=`curl -s -X POST -d @scripts/api/data/ipGroup-all.json http://localhost:8080/api/admin/groups/ip -H "Content-type:application/json"`
echo $OUTPUT
echo $OUTPUT | jq .
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties
index bc39855adc6..ca19a51533e 100755
--- a/src/main/java/Bundle.properties
+++ b/src/main/java/Bundle.properties
@@ -1,8 +1,10 @@
dataverse=Dataverse
newDataverse=New Dataverse
hostDataverse=Host Dataverse
+dataverses=Dataverses
passwd=Password
dataset=Dataset
+datasets=Datasets
newDataset=New Dataset
files=Files
file=File
@@ -90,7 +92,7 @@ footer.dataverseOnGitHub=Dataverse On GitHub
footer.dataverseProjectOn=Dataverse Project on
footer.Twitter=Twitter
footer.dataScienceIQSS=Developed at the Institute for Quantitative Social Science
-footer.copyright=Copyright © 2016, The President & Fellows of Harvard College
+footer.copyright=Copyright © {0}
footer.widget.datastored=Data is stored at {0}.
footer.widget.login=Log in to
footer.privacyPolicy=Privacy Policy
@@ -138,7 +140,8 @@ wasPublished=, was published in
wasReturnedByReviewer=, was returned by the curator of
toReview=Don't forget to publish it or send it back to the contributor!
worldMap.added=dataset had a WorldMap layer data added to it.
-notification.welcome=Welcome to {0} Dataverse! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}.
+# Bundle file editors, please note that "notification.welcome" is used in a unit test.
+notification.welcome=Welcome to {0} Dataverse! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address.
notification.demoSite=Demo Site
notification.requestFileAccess=File access requested for dataset: {0}.
notification.grantFileAccess=Access granted for files in dataset: {0}.
@@ -204,6 +207,16 @@ login.builtin.invalidUsernameEmailOrPassword=The username, email address, or pas
# how do we exercise login.error? Via a password upgrade failure? See https://github.com/IQSS/dataverse/pull/2922
login.error=Error validating the username, email address, or password. Please try again. If the problem persists, contact an administrator.
+#confirmemail.xhtml
+confirmEmail.pageTitle=Email Verification
+confirmEmail.submitRequest=Verify Email
+confirmEmail.submitRequest.success=A verification email has been sent to {0}. Note, the verify link will expire after {1}.
+confirmEmail.details.success=Email address verified!
+confirmEmail.details.failure=We were unable to verify your email address. Please navigate to your Account Information page and click the "Verify Email" button.
+confirmEmail.details.goToAccountPageButton=Go to Account Information
+confirmEmail.notVerified=Not Verified
+confirmEmail.verified=Verified
+
#shib.xhtml
shib.btn.convertAccount=Convert Account
shib.btn.createAccount=Create Account
@@ -420,9 +433,12 @@ notification.email.returned.dataset.subject=Dataverse: Your dataset has been ret
notification.email.create.account.subject=Dataverse: Your account has been created
notification.email.assign.role.subject=Dataverse: You have been assigned a role
notification.email.revoke.role.subject=Dataverse: Your role has been revoked
+notification.email.verifyEmail.subject=Dataverse: Verify your email address
notification.email.greeting=Hello, \n
+# Bundle file editors, please note that "notification.email.welcome" is used in a unit test
notification.email.welcome=Welcome to Dataverse! Get started by adding or finding data. Have questions? Check out the User Guide at {0}/{1}/user/ or contact Dataverse Support for assistance. Want to test out Dataverse features? Use our Demo Site at https://demo.dataverse.org
+notification.email.welcomeConfirmEmailAddOn=\n\nPlease verify your email address at {0}. Note, the verify link will expire after {1}. Send another verification email by visiting your account page.
notification.email.requestFileAccess=File access requested for dataset: {0}. Manage permissions at {1}.
notification.email.grantFileAccess=Access granted for files in dataset: {0} (view at {1}).
notification.email.rejectFileAccess=Access rejected for requested files in dataset: {0} (view at {1}).
@@ -437,6 +453,11 @@ notification.email.worldMap.added={0} (view at {1}) had WorldMap layer data adde
notification.email.closing=\n\nThank you,\nThe Dataverse Project
notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}).
notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}).
+notification.email.changeEmail=Hello, {0}.{1}\n\nPlease contact us if you did not intend this change or if you need assistance.
+hours=hours
+hour=hour
+minutes=minutes
+minute=minute
# passwordreset.xhtml
@@ -717,11 +738,11 @@ dataverse.permissions.roles.copy=Copy Role
# permissions-manage-files.xhtml
-dataverse.permissionsFiles.title=File Permissions
+dataverse.permissionsFiles.title=Restricted File Permissions
dataverse.permissionsFiles.usersOrGroups=Users/Groups
dataverse.permissionsFiles.usersOrGroups.assignBtn=Grant Access to Users/Groups
-dataverse.permissionsFiles.usersOrGroups.description=All the users and groups that have access to files in this dataset.
+dataverse.permissionsFiles.usersOrGroups.description=All the users and groups that have access to restricted files in this dataset.
dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation)
dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID
dataverse.permissionsFiles.usersOrGroups.tabHeader.email=Email
@@ -731,7 +752,8 @@ dataverse.permissionsFiles.usersOrGroups.file=File
dataverse.permissionsFiles.usersOrGroups.files=Files
dataverse.permissionsFiles.usersOrGroups.invalidMsg=There are no users or groups with access to the restricted files in this dataset.
-dataverse.permissionsFiles.files=Files
+dataverse.permissionsFiles.files=Restricted Files
+dataverse.permissionsFiles.files.label={0, choice, 0#Restricted Files|1#Restricted File|2#Restricted Files}
dataverse.permissionsFiles.files.description=All the restricted files in this dataset.
dataverse.permissionsFiles.files.tabHeader.fileName=File Name
dataverse.permissionsFiles.files.tabHeader.roleAssignees=Users/Groups
@@ -743,8 +765,11 @@ dataverse.permissionsFiles.files.public=Public
dataverse.permissionsFiles.files.restricted=Restricted
dataverse.permissionsFiles.files.roleAssignee=User/Group
dataverse.permissionsFiles.files.roleAssignees=Users/Groups
+dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Users/Groups|1#User/Group|2#Users/Groups}
dataverse.permissionsFiles.files.assignBtn=Assign Access
dataverse.permissionsFiles.files.invalidMsg=There are no restricted files in this dataset.
+dataverse.permissionsFiles.files.requested=Requested Files
+dataverse.permissionsFiles.files.selected=Selecting {0} of {1} {2}
dataverse.permissionsFiles.viewRemoveDialog.header=File Access
dataverse.permissionsFiles.viewRemoveDialog.removeBtn=Remove Access
@@ -752,12 +777,11 @@ dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=Are you sure
dataverse.permissionsFiles.assignDialog.header=Grant File Access
dataverse.permissionsFiles.assignDialog.description=Grant file access to users and groups.
-dataverse.permissionsFiles.assignDialog.userOrGroup=User/Group
-dataverse.permissionsFiles.assignDialog.userOrGroup.title=User/Group
+dataverse.permissionsFiles.assignDialog.userOrGroup=Users/Groups
dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=Enter User/Group Name
dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=No matches found.
dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group.
-dataverse.permissionsFiles.assignDialog.file=File
+dataverse.permissionsFiles.assignDialog.fileName=File Name
dataverse.permissionsFiles.assignDialog.grantBtn=Grant
dataverse.permissionsFiles.assignDialog.rejectBtn=Reject
@@ -779,11 +803,12 @@ dataverse.permissions.Q2.answer.curator.description=- Edit metadata, upload file
dataverse.permissions.usersOrGroups.assignDialog.header=Assign Role
dataverse.permissions.usersOrGroups.assignDialog.description=Grant permissions to users and groups by assigning them a role.
-dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=User/Group
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Users/Groups
dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Enter User/Group Name
dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=No matches found.
dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group.
dataverse.permissions.usersOrGroups.assignDialog.role.description=These are the permissions associated with the selected role.
+dataverse.permissions.usersOrGroups.assignDialog.role.warning=Assigning the {0} role means the user(s) will also have the {0} role applied to all {1} within this {2}.
dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Please select a role to assign.
# roles-edit.xhtml
@@ -804,6 +829,7 @@ dataverse.permissions.explicitGroupEditDialog.title.new=Create Group
dataverse.permissions.explicitGroupEditDialog.title.edit=Edit Group {0}
dataverse.permissions.explicitGroupEditDialog.help=Add users or other groups to this group.
dataverse.permissions.explicitGroupEditDialog.groupIdentifier=Group Identifier
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip=Short name used for the ID of this group.
dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=Group identifier cannot be empty
dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=Group identifier can contain only letters, digits, underscores (_) and dashes (-)
dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=Consists of letters, digits, underscores (_) and dashes (-)
@@ -978,6 +1004,8 @@ dataset.editBtn.itemLabel.terms=Terms
dataset.editBtn.itemLabel.permissions=Permissions
dataset.editBtn.itemLabel.widgets=Widgets
dataset.editBtn.itemLabel.privateUrl=Private URL
+dataset.editBtn.itemLabel.permissionsDataset=Dataset
+dataset.editBtn.itemLabel.permissionsFile=Restricted Files
dataset.editBtn.itemLabel.deleteDataset=Delete Dataset
dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version
dataset.editBtn.itemLabel.deaccession=Deaccession Dataset
@@ -1125,10 +1153,11 @@ file.noSelectedFiles.tip=There are no selected files to display.
file.noUploadedFiles.tip=Files you upload will appear here.
file.delete=Delete
file.metadata=Metadata
-file.deleted.success=Files {0} will be permanently deleted from this version of this dataset once you click on the Save Changes button.
+file.deleted.success=Files "{0}" will be permanently deleted from this version of this dataset once you click on the Save Changes button.
+file.editAccess=Edit Access
file.restrict=Restrict
file.unrestrict=Unrestrict
-file.restricted.success=The file(s) {0} will be restricted after you click on the Save Changes button on the bottom of this page.
+file.restricted.success=Files "{0}" will be restricted once you click on the Save Changes button.
file.download.header=Download
file.preview=Preview:
file.fileName=File Name
diff --git a/src/main/java/edu/harvard/iq/dataverse/BibtexCitation.java b/src/main/java/edu/harvard/iq/dataverse/BibtexCitation.java
index 742c86c70b8..0a6a930a94e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/BibtexCitation.java
+++ b/src/main/java/edu/harvard/iq/dataverse/BibtexCitation.java
@@ -71,7 +71,7 @@ public String getPublisher() {
public String toString() {
StringBuilder citation = new StringBuilder("@data{");
citation.append(persistentId.getIdentifier() + "_" + year + "," + "\r\n");
- citation.append("author = {").append(String.join("; ", authors)).append("},\r\n");
+ citation.append("author = {").append(String.join(" and ", authors)).append("},\r\n");
citation.append("publisher = {").append(publisher).append("},\r\n");
citation.append("title = {").append(title).append("},\r\n");
citation.append("year = {").append(year).append("},\r\n");
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 24c01e3b107..057faf4211e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -16,6 +16,7 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.Files;
+import java.util.Comparator;
import javax.persistence.Entity;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
@@ -44,7 +45,7 @@
, @Index(columnList="md5")
, @Index(columnList="contenttype")
, @Index(columnList="restricted")})
-public class DataFile extends DvObject {
+public class DataFile extends DvObject implements Comparable {
private static final long serialVersionUID = 1L;
public static final char INGEST_STATUS_NONE = 65;
@@ -228,6 +229,11 @@ public String getOriginalFileFormat() {
return null;
}
+ @Override
+ public boolean isAncestorOf( DvObject other ) {
+ return equals(other);
+ }
+
/*
* A user-friendly version of the "original format":
*/
@@ -604,6 +610,13 @@ public String getDisplayName() {
return getLatestFileMetadata().getLabel();
}
+ @Override
+ public int compareTo(Object o) {
+ DataFile other = (DataFile) o;
+ return this.getDisplayName().toUpperCase().compareTo(other.getDisplayName().toUpperCase());
+
+ }
+
/**
* Check if the Geospatial Tag has been assigned to this file
* @return
@@ -619,4 +632,4 @@ public boolean hasGeospatialTag(){
}
return false;
}
-}
+}
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
index 0acf76dc00e..39a953d0de7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java
@@ -654,5 +654,9 @@ public String getDisplayName() {
protected boolean isPermissionRoot() {
return false;
}
-
+
+ @Override
+ public boolean isAncestorOf( DvObject other ) {
+ return equals(other) || equals(other.getOwner());
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 882608dff45..55811ac8b1a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -83,7 +83,6 @@
import javax.faces.event.AjaxBehaviorEvent;
-import javax.faces.context.ExternalContext;
import org.apache.commons.lang.StringEscapeUtils;
import org.primefaces.component.tabview.TabView;
@@ -187,7 +186,6 @@ public enum DisplayMode {
private List dataverseTemplates = new ArrayList();
private Template defaultTemplate;
private Template selectedTemplate;
- private String globalId;
private String persistentId;
private String version;
private String protocol = "";
@@ -322,11 +320,7 @@ public Long getMaxFileUploadSizeInBytes(){
}
public boolean isUnlimitedUploadFileSize(){
-
- if (this.maxFileUploadSizeInBytes == null){
- return true;
- }
- return false;
+ return (this.maxFileUploadSizeInBytes == null);
}
public boolean isMetadataExportEnabled() {
@@ -455,9 +449,8 @@ public boolean isNoDVsRemaining() {
* Convenience method for "Download File" button display logic
*
* Used by the dataset.xhtml render logic when listing files
- * > Assume user already has view access to the file list
- * ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^!!!
- *
+ * Assumes user already has view access to the file list.
+ *
* @param fileMetadata
* @return boolean
*/
@@ -498,29 +491,13 @@ public boolean canDownloadFile(FileMetadata fileMetadata){
}
// --------------------------------------------------------------------
- // Conditions (2) through (4) are for Restricted files
+ // Conditions (2) through (3) are for Restricted files
// --------------------------------------------------------------------
// --------------------------------------------------------------------
- // (2) In Dataverse 4.3 and earlier we required that users be authenticated
- // to download files, but in developing the Private URL feature, we have
- // added a new subclass of "User" called "PrivateUrlUser" that returns false
- // for isAuthenticated but that should be able to download restricted files
- // when given the Member role (which includes the DownloadFile permission).
- // This is consistent with how Builtin and Shib users (both are
- // AuthenticatedUsers) can download restricted files when they are granted
- // the Member role. For this reason condition 2 has been changed. Previously,
- // we required isSessionUserAuthenticated to return true. Now we require
- // that the User is not an instance of GuestUser, which is similar in
- // spirit to the previous check.
- // --------------------------------------------------------------------
- if (session.getUser() instanceof GuestUser){
- this.fileDownloadPermissionMap.put(fid, false);
- return false;
- }
-
- // --------------------------------------------------------------------
- // (3) Does the User have DownloadFile Permission at the **Dataset** level
+ // (2) Does the User have DownloadFile Permission at the **Dataset** level
+ // Michael: Leaving this in for now, but shouldn't this be already resolved
+ // by the permission system, given that files are never permission roots?
// --------------------------------------------------------------------
if (this.doesSessionUserHaveDataSetPermission(Permission.DownloadFile)){
// Yes, save answer and return true
@@ -529,7 +506,7 @@ public boolean canDownloadFile(FileMetadata fileMetadata){
}
// --------------------------------------------------------------------
- // (4) Does the user has DownloadFile permission on the DataFile
+ // (3) Does the user have DownloadFile permission on the DataFile
// --------------------------------------------------------------------
if (this.permissionService.on(fileMetadata.getDataFile()).has(Permission.DownloadFile)){
this.fileDownloadPermissionMap.put(fid, true);
@@ -537,7 +514,7 @@ public boolean canDownloadFile(FileMetadata fileMetadata){
}
// --------------------------------------------------------------------
- // (6) No download....
+ // (4) No download for you! Come back with permissions!
// --------------------------------------------------------------------
this.fileDownloadPermissionMap.put(fid, false);
@@ -558,7 +535,7 @@ public boolean isThumbnailAvailable(FileMetadata fileMetadata) {
// Another convenience method - to cache Update Permission on the dataset:
public boolean canUpdateDataset() {
- return permissionsWrapper.canUpdateDataset(this.session.getUser(), this.dataset);
+ return permissionsWrapper.canUpdateDataset(dvRequestService.getDataverseRequest(), this.dataset);
}
public boolean canPublishDataverse() {
@@ -579,13 +556,9 @@ public boolean canPublishDataverse() {
//}
public boolean canViewUnpublishedDataset() {
- return permissionsWrapper.canViewUnpublishedDataset(this.session.getUser(), this.dataset);
- //return doesSessionUserHaveDataSetPermission(Permission.ViewUnpublishedDataset);
+ return permissionsWrapper.canViewUnpublishedDataset( dvRequestService.getDataverseRequest(), dataset);
}
- private Boolean sessionUserAuthenticated = null;
-
-
/*
* 4.2.1 optimization.
* HOWEVER, this doesn't appear to be saving us anything!
@@ -593,28 +566,7 @@ public boolean canViewUnpublishedDataset() {
* every time; it doesn't do any new db lookups.
*/
public boolean isSessionUserAuthenticated() {
- logger.fine("entering isSessionUserAuthenticated;");
- if (sessionUserAuthenticated != null) {
- logger.fine("using cached isSessionUserAuthenticated;");
-
- return sessionUserAuthenticated;
- }
-
- if (session == null) {
- return false;
- }
-
- if (session.getUser() == null) {
- return false;
- }
-
- if (session.getUser().isAuthenticated()) {
- sessionUserAuthenticated = true;
- return true;
- }
-
- sessionUserAuthenticated = false;
- return false;
+ return session.getUser().isAuthenticated();
}
/**
diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
index cb04cfb0d9a..44f0a7a77b4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
+++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java
@@ -32,7 +32,6 @@
import javax.validation.constraints.Size;
import org.hibernate.validator.constraints.NotBlank;
import org.hibernate.validator.constraints.NotEmpty;
-import org.hibernate.validator.constraints.URL;
/**
*
@@ -734,6 +733,14 @@ public boolean isPermissionRoot() {
public void setPermissionRoot(boolean permissionRoot) {
this.permissionRoot = permissionRoot;
}
-
+
+ @Override
+ public boolean isAncestorOf( DvObject other ) {
+ while ( other != null ) {
+ if ( equals(other) ) return true;
+ other = other.getOwner();
+ }
+ return false;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
index cfb2fcc252a..3d35e548347 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataversePage.java
@@ -58,12 +58,10 @@ public class DataversePage implements java.io.Serializable {
private static final Logger logger = Logger.getLogger(DataversePage.class.getCanonicalName());
public enum EditMode {
-
CREATE, INFO, FEATURED
}
public enum LinkMode {
-
SAVEDSEARCH, LINKDATAVERSE
}
@@ -111,8 +109,8 @@ public enum LinkMode {
private LinkMode linkMode;
private Long ownerId;
- private DualListModel facets = new DualListModel<>(new ArrayList(), new ArrayList());
- private DualListModel featuredDataverses = new DualListModel<>(new ArrayList(), new ArrayList());
+ private DualListModel facets = new DualListModel<>(new ArrayList<>(), new ArrayList<>());
+ private DualListModel featuredDataverses = new DualListModel<>(new ArrayList<>(), new ArrayList<>());
private List dataversesForLinking;
private Long linkingDataverseId;
private List linkingDVSelectItems;
@@ -173,10 +171,8 @@ public void setDataverseSubjectControlledVocabularyValues(List roleAssignments;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index a86374185e8..851fc5a50cc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -550,7 +550,7 @@ public void restrictFiles(boolean restricted) {
if (fileNames == null) {
fileNames = fmd.getLabel();
} else {
- fileNames = fileNames.concat(fmd.getLabel());
+ fileNames = fileNames.concat(", " + fmd.getLabel());
}
}
fmd.setRestricted(restricted);
@@ -590,7 +590,7 @@ public void restrictFilesDP(boolean restricted) {
if (fileNames == null) {
fileNames = fmd.getLabel();
} else {
- fileNames = fileNames.concat(fmd.getLabel());
+ fileNames = fileNames.concat(", " + fmd.getLabel());
}
}
if (fmd.getDataFile().equals(fmw.getDataFile())) {
@@ -628,7 +628,7 @@ public void deleteFiles() {
if (fileNames == null) {
fileNames = fmd.getLabel();
} else {
- fileNames = fileNames.concat(fmd.getLabel());
+ fileNames = fileNames.concat(", " + fmd.getLabel());
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
index f82b7a3e666..0893909a6c0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/MailServiceBean.java
@@ -9,6 +9,7 @@
import edu.harvard.iq.dataverse.authorization.groups.Group;
import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
+import edu.harvard.iq.dataverse.confirmemail.ConfirmEmailServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean.Key;
import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -62,6 +63,8 @@ public class MailServiceBean implements java.io.Serializable {
PermissionServiceBean permissionService;
@EJB
GroupServiceBean groupService;
+ @EJB
+ ConfirmEmailServiceBean confirmEmailService;
private static final Logger logger = Logger.getLogger(MailServiceBean.class.getCanonicalName());
@@ -426,6 +429,9 @@ private String getMessageTextBasedOnNotification(UserNotification userNotificati
systemConfig.getGuidesBaseUrl(),
systemConfig.getVersion()
));
+ String optionalConfirmEmailAddon = confirmEmailService.optionalConfirmEmailAddonMsg(userNotification.getUser());
+ accountCreatedMessage += optionalConfirmEmailAddon;
+ logger.info("accountCreatedMessage: " + accountCreatedMessage);
return messageText += accountCreatedMessage;
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
index f37b568d333..db25987694b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageFilePermissionsPage.java
@@ -26,6 +26,7 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.EJB;
@@ -37,6 +38,9 @@
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import org.apache.commons.lang.StringUtils;
+import org.primefaces.event.SelectEvent;
+import org.primefaces.event.ToggleSelectEvent;
+import org.primefaces.event.UnselectEvent;
/**
*
@@ -80,9 +84,9 @@ public class ManageFilePermissionsPage implements java.io.Serializable {
DataverseSession session;
Dataset dataset = new Dataset();
- private final Map> roleAssigneeMap = new HashMap<>();
- private final Map> fileMap = new HashMap<>();
- private final Map> fileAccessRequestMap = new HashMap<>();
+ private final TreeMap> roleAssigneeMap = new TreeMap<>();
+ private final TreeMap> fileMap = new TreeMap<>();
+ private final TreeMap> fileAccessRequestMap = new TreeMap<>();
public Dataset getDataset() {
return dataset;
@@ -92,19 +96,19 @@ public void setDataset(Dataset dataset) {
this.dataset = dataset;
}
- public Map> getRoleAssigneeMap() {
+ public TreeMap> getRoleAssigneeMap() {
return roleAssigneeMap;
}
- public Map> getFileMap() {
+ public TreeMap> getFileMap() {
return fileMap;
}
- public Map> getFileAccessRequestMap() {
+ public TreeMap> getFileAccessRequestMap() {
return fileAccessRequestMap;
}
-
-
+
+
public String init() {
if (dataset.getId() != null) {
dataset = datasetService.find(dataset.getId());
@@ -118,9 +122,7 @@ public String init() {
if (!permissionService.on(dataset).has(Permission.ManageDatasetPermissions)) {
return permissionsWrapper.notAuthorized();
}
-
initMaps();
-
return "";
}
@@ -229,6 +231,7 @@ public void setSelectedRoleAssignmentRows(List selectedRoleAs
}
public void initViewRemoveDialogByFile(DataFile file, List raRows) {
+ setSelectedRoleAssignmentRows(new ArrayList());
this.selectedFile = file;
this.selectedRoleAssignee = null;
this.roleAssignments = raRows;
@@ -236,6 +239,7 @@ public void initViewRemoveDialogByFile(DataFile file, List ra
}
public void initViewRemoveDialogByRoleAssignee(RoleAssignee ra, List raRows) {
+ setSelectedRoleAssignmentRows(new ArrayList());
this.selectedFile = null;
this.selectedRoleAssignee = ra;
this.roleAssignments = raRows;
@@ -311,7 +315,7 @@ public void initAssignDialogForFileRequester(AuthenticatedUser au) {
fileRequester = au;
selectedRoleAssignees = null;
selectedFiles.clear();
- selectedFiles.addAll(fileAccessRequestMap.get(au));
+ selectedFiles.addAll(fileAccessRequestMap.get(au));
showUserGroupMessages();
}
@@ -369,7 +373,7 @@ private void grantAccessToRequests(AuthenticatedUser au, List files) {
}
if (actionPerformed) {
JsfHelper.addSuccessMessage("File Access request by " + au.getDisplayInfo().getTitle() + " was granted.");
- userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.GRANTFILEACCESS, dataset.getId());
+ userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.GRANTFILEACCESS, dataset.getId());
initMaps();
}
@@ -408,7 +412,9 @@ private boolean assignRole(RoleAssignee ra, DataFile file, DataverseRole r) {
JH.addMessage(FacesMessage.SEVERITY_ERROR, "The role was not able to be assigned.", "Permissions " + ex.getRequiredPermissions().toString() + " missing.");
return false;
} catch (CommandException ex) {
- JH.addMessage(FacesMessage.SEVERITY_FATAL, "The role was not able to be assigned.");
+ //JH.addMessage(FacesMessage.SEVERITY_FATAL, "The role was not able to be assigned.");
+ String message = r.getName() + " role could NOT be assigned to " + ra.getDisplayInfo().getTitle() + " for " + file.getDisplayName() + ".";
+ JsfHelper.addErrorMessage(message);
logger.log(Level.SEVERE, "Error assiging role: " + ex.getMessage(), ex);
return false;
}
@@ -449,7 +455,6 @@ public void setRenderFileMessages(boolean renderFileMessages) {
-
// inner class used fordisplay of role assignments
public static class RoleAssignmentRow {
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java
index 1452f802cf6..e5751216ddb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManageGroupsPage.java
@@ -1,8 +1,3 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
package edu.harvard.iq.dataverse;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
@@ -12,7 +7,6 @@
import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup;
import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
-import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.impl.CreateExplicitGroupCommand;
@@ -23,7 +17,6 @@
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
-import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
@@ -41,8 +34,6 @@
import org.apache.commons.lang.StringUtils;
/**
- * TODO: should we add groups to dataverse obj?
- * TODO: add support for logical groups.
* @author michaelsuo
*/
@ViewScoped
@@ -94,21 +85,15 @@ public String init() {
if (editDv == null) {
return permissionsWrapper.notFound();
}
-
+
Boolean hasPermissions = permissionsWrapper.canIssueCommand(editDv, CreateExplicitGroupCommand.class);
hasPermissions |= permissionsWrapper.canIssueCommand(editDv, DeleteExplicitGroupCommand.class);
hasPermissions |= permissionsWrapper.canIssueCommand(editDv, UpdateExplicitGroupCommand.class);
if (!hasPermissions) {
return permissionsWrapper.notAuthorized();
}
- explicitGroups = new LinkedList<>();
+ explicitGroups = new LinkedList<>(explicitGroupService.findByOwner(getDataverseId()));
- List explicitGroupsForThisDataverse =
- explicitGroupService.findByOwner(getDataverseId());
-
- for (ExplicitGroup g : explicitGroupsForThisDataverse) {
- getExplicitGroups().add(g);
- }
return null;
}
@@ -160,7 +145,7 @@ public void deleteGroup() {
}
}
- private List selectedGroupRoleAssignees = new LinkedList<>();
+ private List selectedGroupRoleAssignees = new ArrayList<>();
public void setSelectedGroupRoleAssignees(List newSelectedGroupRoleAssignees) {
this.selectedGroupRoleAssignees = newSelectedGroupRoleAssignees;
@@ -184,26 +169,19 @@ public void viewSelectedGroup(ExplicitGroup selectedGroup) {
this.selectedGroup = selectedGroup;
// initialize member list for autocomplete interface
- setSelectedGroupAddRoleAssignees(new LinkedList());
+ setSelectedGroupAddRoleAssignees(new LinkedList<>());
setSelectedGroupRoleAssignees(getExplicitGroupMembers(selectedGroup));
}
/**
* Return the set of all role assignees for an explicit group.
* Does not traverse subgroups.
- * TODO right now only checks for authenticated users and explicit groups.
* @param eg The explicit group to check.
* @return The set of role assignees belonging to explicit group.
*/
public List getExplicitGroupMembers(ExplicitGroup eg) {
- if (eg != null) {
- List ras = new LinkedList<>();
- ras.addAll(eg.getContainedAuthenticatedUsers());
- ras.addAll(eg.getContainedExplicitGroups());
- return ras;
- } else {
- return null;
- }
+ return (eg != null) ?
+ new ArrayList<>(eg.getDirectMembers()) : null;
}
/**
@@ -223,8 +201,15 @@ public String getRoleAssigneeTypeString(RoleAssignee ra) {
}
public String getMembershipString(ExplicitGroup eg) {
- long userCount = getGroupAuthenticatedUserCount(eg);
- long groupCount = getGroupGroupCount(eg);
+ long userCount = 0;
+ long groupCount = 0;
+ for ( RoleAssignee ra : eg.getDirectMembers() ) {
+ if ( ra instanceof User ) {
+ userCount++;
+ } else {
+ groupCount++;
+ }
+ }
if (userCount == 0 && groupCount == 0) {
return "No members";
@@ -246,28 +231,6 @@ public String getMembershipString(ExplicitGroup eg) {
return memberString;
}
- /**
- * Returns the number of authenticated users in an {@code ExplicitGroup}.
- * Does not traverse subgroups.
- * @param group The {@code ExplicitGroup} to get the user count for
- * @return User count as long
- */
- public long getGroupAuthenticatedUserCount(ExplicitGroup eg) {
- Set aus = eg.getContainedAuthenticatedUsers();
- return aus.size();
- }
-
- /**
- * Returns the number of explicit groups in an {@code ExplicitGroup}.
- * Does not traverse subgroups.
- * @param group The {@code ExplicitGroup} to get the group count for
- * @return Group count as long
- */
- public long getGroupGroupCount(ExplicitGroup eg) {
- Set egs = eg.getContainedExplicitGroups();
- return egs.size();
- }
-
public void removeMemberFromSelectedGroup(RoleAssignee ra) {
selectedGroup.remove(ra);
}
@@ -308,7 +271,7 @@ public void initExplicitGroupDialog(ActionEvent ae) {
setExplicitGroupName("");
setExplicitGroupIdentifier("");
setNewExplicitGroupDescription("");
- setNewExplicitGroupRoleAssignees(new LinkedList());
+ setNewExplicitGroupRoleAssignees(new LinkedList<>());
FacesContext context = FacesContext.getCurrentInstance();
setSelectedGroupRoleAssignees(null);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
index 48724e1f97d..8446599c1e8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ManagePermissionsPage.java
@@ -2,10 +2,10 @@
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
+import edu.harvard.iq.dataverse.authorization.DataverseRolePermissionHelper;
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.RoleAssignee;
import edu.harvard.iq.dataverse.authorization.RoleAssigneeDisplayInfo;
-import edu.harvard.iq.dataverse.authorization.groups.Group;
import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
import edu.harvard.iq.dataverse.authorization.groups.impl.builtin.AuthenticatedUsers;
import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroup;
@@ -27,6 +27,7 @@
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
+import java.util.ResourceBundle;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
@@ -39,7 +40,6 @@
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import org.apache.commons.lang.StringEscapeUtils;
-import org.apache.commons.lang.StringUtils;
/**
*
@@ -80,15 +80,22 @@ public class ManagePermissionsPage implements java.io.Serializable {
@Inject
DataverseSession session;
+
+ private DataverseRolePermissionHelper dataverseRolePermissionHelper;
+ private List roleList;
DvObject dvObject = new Dataverse(); // by default we use a Dataverse, but this will be overridden in init by the findById
-
+
public DvObject getDvObject() {
return dvObject;
}
public void setDvObject(DvObject dvObject) {
this.dvObject = dvObject;
+ /*
+ SEK 09/15/2016 - may need to do something here if permissions are transmitted/inherited from dataverse to dataverse
+ */
+
/*if (dvObject instanceof DvObjectContainer) {
inheritAssignments = !((DvObjectContainer) dvObject).isPermissionRoot();
}*/
@@ -115,6 +122,9 @@ public String init() {
if (dvObject instanceof Dataverse) {
initAccessSettings();
}
+ roleList = roleService.findAll();
+ roleAssignments = initRoleAssignments();
+ dataverseRolePermissionHelper = new DataverseRolePermissionHelper(roleList);
return "";
}
@@ -131,9 +141,19 @@ public RoleAssignment getSelectedRoleAssignment() {
public void setSelectedRoleAssignment(RoleAssignment selectedRoleAssignment) {
this.selectedRoleAssignment = selectedRoleAssignment;
- }
-
+ }
+
+ private List roleAssignments;
+
public List getRoleAssignments() {
+ return roleAssignments;
+ }
+
+ public void setRoleAssignments(List roleAssignments) {
+ this.roleAssignments = roleAssignments;
+ }
+
+ public List initRoleAssignments() {
List raList = null;
if (dvObject != null && dvObject.getId() != null) {
Set ras = roleService.rolesAssignments(dvObject);
@@ -159,7 +179,7 @@ public void removeRoleAssignment() {
if (dvObject instanceof Dataverse) {
initAccessSettings(); // in case the revoke was for the AuthenticatedUsers group
}
-
+ roleAssignments = initRoleAssignments();
showAssignmentMessages();
}
@@ -291,6 +311,7 @@ public void saveConfiguration(ActionEvent e) {
}
}
}
+ roleAssignments = initRoleAssignments();
showConfigureMessages();
}
@@ -362,6 +383,47 @@ public DataverseRole getAssignedRole() {
}
return null;
}
+
+ public String getAssignedRoleObjectTypes(){
+ String retString = "";
+ if (selectedRoleId != null) {
+ /* SEK 09/15/2016 commenting out for now
+ because permissions are not inherited
+
+ if (dataverseRolePermissionHelper.hasDataversePermissions(selectedRoleId) && dvObject instanceof Dataverse){
+ String dvLabel = ResourceBundle.getBundle("Bundle").getString("dataverses");
+ retString = dvLabel;
+ }
+ */
+ if (dataverseRolePermissionHelper.hasDatasetPermissions(selectedRoleId) && dvObject instanceof Dataverse){
+ String dsLabel = ResourceBundle.getBundle("Bundle").getString("datasets");
+ if(!retString.isEmpty()) {
+ retString +=", " + dsLabel;
+ } else {
+ retString = dsLabel;
+ }
+
+ }
+ if (dataverseRolePermissionHelper.hasFilePermissions(selectedRoleId)){
+ String filesLabel = ResourceBundle.getBundle("Bundle").getString("files");
+ if(!retString.isEmpty()) {
+ retString +=", " + filesLabel;
+ } else {
+ retString = filesLabel;
+ }
+ }
+ return retString;
+ }
+ return null;
+ }
+
+ public String getDefinitionLevelString(){
+ if (dvObject != null){
+ if (dvObject instanceof Dataverse) return ResourceBundle.getBundle("Bundle").getString("dataverse");
+ if (dvObject instanceof Dataset) return ResourceBundle.getBundle("Bundle").getString("dataset");
+ }
+ return null;
+ }
public void assignRole(ActionEvent evt) {
logger.info("Got to assignRole");
@@ -373,6 +435,7 @@ public void assignRole(ActionEvent evt) {
for (RoleAssignee roleAssignee : selectedRoleAssigneesList) {
assignRole(roleAssignee, roleService.find(selectedRoleId));
}
+ roleAssignments = initRoleAssignments();
}
/**
@@ -409,10 +472,12 @@ private void assignRole(RoleAssignee ra, DataverseRole r) {
} catch (PermissionException ex) {
JH.addMessage(FacesMessage.SEVERITY_ERROR, "The role was not able to be assigned.", "Permissions " + ex.getRequiredPermissions().toString() + " missing.");
} catch (CommandException ex) {
- JH.addMessage(FacesMessage.SEVERITY_FATAL, "The role was not able to be assigned.");
+ String message = r.getName() + " role could NOT be assigned to " + ra.getDisplayInfo().getTitle() + " for " + StringEscapeUtils.escapeHtml(dvObject.getDisplayName()) + ".";
+ JsfHelper.addErrorMessage(message);
+ //JH.addMessage(FacesMessage.SEVERITY_FATAL, "The role was not able to be assigned.");
logger.log(Level.SEVERE, "Error assiging role: " + ex.getMessage(), ex);
}
-
+
showAssignmentMessages();
}
@@ -471,6 +536,15 @@ public void updateRole(ActionEvent e) {
}
showRoleMessages();
}
+
+
+ public DataverseRolePermissionHelper getDataverseRolePermissionHelper() {
+ return dataverseRolePermissionHelper;
+ }
+
+ public void setDataverseRolePermissionHelper(DataverseRolePermissionHelper dataverseRolePermissionHelper) {
+ this.dataverseRolePermissionHelper = dataverseRolePermissionHelper;
+ }
/*
============================================================================
diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
index 82019c3db85..ef6ab430c8d 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionServiceBean.java
@@ -1,6 +1,5 @@
package edu.harvard.iq.dataverse;
-import edu.harvard.iq.dataverse.api.datadeposit.SwordAuth;
import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean;
@@ -10,7 +9,6 @@
import edu.harvard.iq.dataverse.authorization.groups.Group;
import edu.harvard.iq.dataverse.authorization.groups.GroupServiceBean;
import edu.harvard.iq.dataverse.authorization.groups.GroupUtil;
-import edu.harvard.iq.dataverse.authorization.groups.impl.builtin.AuthenticatedUsers;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.Command;
@@ -28,7 +26,10 @@
import javax.persistence.PersistenceContext;
import static edu.harvard.iq.dataverse.engine.command.CommandHelper.CH;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import java.util.Arrays;
import java.util.LinkedList;
+import java.util.logging.Level;
+import java.util.stream.Collectors;
import javax.persistence.Query;
/**
@@ -45,15 +46,10 @@ public class PermissionServiceBean {
private static final Logger logger = Logger.getLogger(PermissionServiceBean.class.getName());
- private static final EnumSet PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY = EnumSet.noneOf( Permission.class );
-
- static {
- for ( Permission p : Permission.values() ) {
- if ( p.requiresAuthenticatedUser() ) {
- PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY.add(p);
- }
- }
- }
+ private static final Set PERMISSIONS_FOR_AUTHENTICATED_USERS_ONLY =
+ EnumSet.copyOf(Arrays.asList(Permission.values()).stream()
+ .filter( Permission::requiresAuthenticatedUser )
+ .collect( Collectors.toList() ));
@EJB
BuiltinUserServiceBean userService;
@@ -210,9 +206,11 @@ public Set permissionsFor( DataverseRequest req, DvObject dvo ) {
// Add permissions specifically given to the user
permissions.addAll( permissionsForSingleRoleAssignee(req.getUser(),dvo) );
Set groups = groupService.groupsFor(req,dvo);
+
// Add permissions gained from groups
for ( Group g : groups ) {
- permissions.addAll( permissionsForSingleRoleAssignee(g,dvo) );
+ final Set groupPermissions = permissionsForSingleRoleAssignee(g,dvo);
+ permissions.addAll(groupPermissions);
}
if ( ! req.getUser().isAuthenticated() ) {
@@ -253,14 +251,16 @@ public Set permissionsFor(RoleAssignee ra, DvObject dvo) {
private Set permissionsForSingleRoleAssignee(RoleAssignee ra, DvObject d) {
// super user check
- // @todo for 4.0, we are allowing superusers all permissions
+ // for 4.0, we are allowing superusers all permissions
// for secure data, we may need to restrict some of the permissions
if (ra instanceof AuthenticatedUser && ((AuthenticatedUser) ra).isSuperuser()) {
return EnumSet.allOf(Permission.class);
}
-
+
+ // Start with no permissions, build from there.
Set retVal = EnumSet.noneOf(Permission.class);
+ // File special case.
if (d instanceof DataFile) {
// unrestricted files that are part of a release dataset
// automatically get download permission for everybody:
@@ -274,6 +274,7 @@ private Set permissionsForSingleRoleAssignee(RoleAssignee ra, DvObje
for (FileMetadata fm : df.getOwner().getReleasedVersion().getFileMetadatas()) {
if (df.equals(fm.getDataFile())) {
retVal.add(Permission.DownloadFile);
+ break;
}
}
}
@@ -281,10 +282,14 @@ private Set permissionsForSingleRoleAssignee(RoleAssignee ra, DvObje
}
}
- for (RoleAssignment asmnt : assignmentsFor(ra, d)) {
- retVal.addAll(asmnt.getRole().permissions());
- }
+ // Direct assignments to ra on d
+ assignmentsFor(ra, d).forEach(
+ asmnt -> retVal.addAll(asmnt.getRole().permissions())
+ );
+ // Recurse up the group containment hierarchy.
+ groupService.groupsFor(ra, d).forEach(
+ grp -> retVal.addAll(permissionsForSingleRoleAssignee(grp, d)));
return retVal;
}
@@ -386,7 +391,7 @@ public List getDataversesUserHasPermissionOn(AuthenticatedUser user,
* query?
*/
String query = "SELECT id FROM dvobject WHERE dtype = 'Dataverse' and id in (select definitionpoint_id from roleassignment where assigneeidentifier in (" + identifiers + "));";
- logger.fine("query: " + query);
+ logger.log(Level.FINE, "query: {0}", query);
Query nativeQuery = em.createNativeQuery(query);
List dataverseIdsToCheck = nativeQuery.getResultList();
List dataversesUserHasPermissionOn = new LinkedList<>();
diff --git a/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java b/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java
index 9a60f71d0df..3261cf1287c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/PermissionsWrapper.java
@@ -8,6 +8,7 @@
import edu.harvard.iq.dataverse.authorization.Permission;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.Command;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.impl.*;
import java.util.HashMap;
import java.util.Map;
@@ -42,7 +43,9 @@ public class PermissionsWrapper implements java.io.Serializable {
/**
* Check if the current Dataset can Issue Commands
*
- * @param commandName
+ * @param dvo Target dataverse object.
+ * @param command The command to execute
+ * @return {@code true} if the user can issue the command on the object.
*/
public boolean canIssueCommand(DvObject dvo, Class extends Command> command) {
if ((dvo==null) || (dvo.getId()==null)){
@@ -118,7 +121,7 @@ public boolean canManageDataversePermissions(User u, Dataverse dv) {
if (u==null){
return false;
}
- return permissionService.userOn(u, dv).has(Permission.ManageDataversePermissions);
+ return permissionService.requestOn(dvRequestService.getDataverseRequest(), dv).has(Permission.ManageDataversePermissions);
}
public boolean canManageDatasetPermissions(User u, Dataset ds) {
@@ -128,15 +131,15 @@ public boolean canManageDatasetPermissions(User u, Dataset ds) {
if (u==null){
return false;
}
- return permissionService.userOn(u, ds).has(Permission.ManageDatasetPermissions);
+ return permissionService.requestOn(dvRequestService.getDataverseRequest(), ds).has(Permission.ManageDatasetPermissions);
}
- public boolean canViewUnpublishedDataset(User user, Dataset dataset) {
- return doesSessionUserHaveDataSetPermission(user, dataset, Permission.ViewUnpublishedDataset);
+ public boolean canViewUnpublishedDataset(DataverseRequest dr, Dataset dataset) {
+ return doesSessionUserHaveDataSetPermission(dr, dataset, Permission.ViewUnpublishedDataset);
}
- public boolean canUpdateDataset(User user, Dataset dataset) {
- return doesSessionUserHaveDataSetPermission(user, dataset, Permission.EditDataset);
+ public boolean canUpdateDataset(DataverseRequest dr, Dataset dataset) {
+ return doesSessionUserHaveDataSetPermission(dr, dataset, Permission.EditDataset);
}
@@ -147,12 +150,12 @@ public boolean canUpdateDataset(User user, Dataset dataset) {
*
* Check Dataset related permissions
*
- * @param user
+ * @param req
* @param dataset
* @param permissionToCheck
* @return
*/
- public boolean doesSessionUserHaveDataSetPermission(User user, Dataset dataset, Permission permissionToCheck){
+ public boolean doesSessionUserHaveDataSetPermission(DataverseRequest req, Dataset dataset, Permission permissionToCheck){
if (permissionToCheck == null){
return false;
}
@@ -167,8 +170,7 @@ public boolean doesSessionUserHaveDataSetPermission(User user, Dataset dataset,
}
// Check the permission
- //
- boolean hasPermission = this.permissionService.userOn(user, dataset).has(permissionToCheck);
+ boolean hasPermission = this.permissionService.requestOn(req, dataset).has(permissionToCheck);
// Save the permission
this.datasetPermissionMap.put(permName, hasPermission);
@@ -196,17 +198,16 @@ public boolean hasDownloadFilePermission(DvObject dvo){
// Check permissions
//
- if (this.permissionService.on(dvo).has(Permission.DownloadFile)){
+ if ( permissionService.on(dvo).has(Permission.DownloadFile) ){
// Yes, has permission, store result
- //
- this.fileDownloadPermissionMap.put(dvo.getId(), true);
+ fileDownloadPermissionMap.put(dvo.getId(), true);
return true;
- }else {
+
+ } else {
// No permission, store result
- //
- this.fileDownloadPermissionMap.put(dvo.getId(), false);
+ fileDownloadPermissionMap.put(dvo.getId(), false);
return false;
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
index 76601774000..7c413d98e68 100644
--- a/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/RoleAssigneeServiceBean.java
@@ -11,6 +11,8 @@
import edu.harvard.iq.dataverse.authorization.groups.impl.explicit.ExplicitGroupServiceBean;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.authorization.users.GuestUser;
+import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.mydata.MyDataFilterParams;
import edu.harvard.iq.dataverse.privateurl.PrivateUrlUtil;
import java.util.ArrayList;
import java.util.List;
@@ -18,6 +20,7 @@
import java.util.Set;
import java.util.TreeMap;
import java.util.logging.Logger;
+import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.ejb.Stateless;
@@ -51,10 +54,10 @@ public class RoleAssigneeServiceBean {
@EJB
DataverseRoleServiceBean dataverseRoleService;
- Map predefinedRoleAssignees = new TreeMap<>();
+ protected Map predefinedRoleAssignees = new TreeMap<>();
@PostConstruct
- void setup() {
+ protected void setup() {
GuestUser gu = GuestUser.get();
predefinedRoleAssignees.put(gu.getIdentifier(), gu);
predefinedRoleAssignees.put(AuthenticatedUsers.get().getIdentifier(), AuthenticatedUsers.get());
@@ -126,15 +129,21 @@ private String getRoleIdListClause(List roleIdList) {
return " AND r.role_id IN (" + StringUtils.join(outputList, ",") + ")";
}
- public List getAssigneeDataverseRoleFor(AuthenticatedUser au) {
- String roleAssigneeIdentifier = "@" + au.getUserIdentifier();
- if (roleAssigneeIdentifier == null) {
+ public List getAssigneeDataverseRoleFor(DataverseRequest dataverseRequest) {
+
+ if (dataverseRequest == null){
+ throw new NullPointerException("dataverseRequest cannot be null!");
+ }
+ AuthenticatedUser au = dataverseRequest.getAuthenticatedUser();
+ if (au.getUserIdentifier() == null){
return null;
}
+ String roleAssigneeIdentifier = "@" + au.getUserIdentifier();
+
List retList = new ArrayList();
roleAssigneeIdentifier = roleAssigneeIdentifier.replaceAll("\\s", ""); // remove spaces from string
- List userGroups = getUserExplicitGroups(roleAssigneeIdentifier.replace("@", ""));
- List userRunTimeGroups = getUserRuntimeGroups(au);
+ List userGroups = getUserExplicitGroups(au);
+ List userRunTimeGroups = getUserRuntimeGroups(dataverseRequest);
String identifierClause = " WHERE r.assigneeIdentifier= '" + roleAssigneeIdentifier + "'";
if (userGroups != null || userRunTimeGroups != null) {
@@ -153,16 +162,25 @@ public List getAssigneeDataverseRoleFor(AuthenticatedUser au) {
return retList;
}
- public List