diff --git a/doc/Sphinx/source/img/image1institutional.png b/doc/Sphinx/source/img/image1institutional.png deleted file mode 100755 index 39852ea1c23..00000000000 Binary files a/doc/Sphinx/source/img/image1institutional.png and /dev/null differ diff --git a/doc/Sphinx/source/img/image2institutional.png b/doc/Sphinx/source/img/image2institutional.png deleted file mode 100755 index a14fded0cc3..00000000000 Binary files a/doc/Sphinx/source/img/image2institutional.png and /dev/null differ diff --git a/doc/Sphinx/source/img/image3institutional.png b/doc/Sphinx/source/img/image3institutional.png deleted file mode 100755 index bc213a656f9..00000000000 Binary files a/doc/Sphinx/source/img/image3institutional.png and /dev/null differ diff --git a/doc/Sphinx/source/img/image4institutional.png b/doc/Sphinx/source/img/image4institutional.png deleted file mode 100755 index 40f1c03d1d7..00000000000 Binary files a/doc/Sphinx/source/img/image4institutional.png and /dev/null differ diff --git a/doc/shib/shib.md b/doc/shib/shib.md index 5d2b3a55858..f29f9cc485e 100644 --- a/doc/shib/shib.md +++ b/doc/shib/shib.md @@ -6,8 +6,6 @@ FIXME: merge with what's in the Installation Guide: http://guides.dataverse.org/ ## Set up a valid SSL cert -See also notes on setting up the SSL cert for https://apitest.dataverse.org at https://github.com/IQSS/dataverse/tree/master/scripts/deploy/apitest.dataverse.org - ### Create a private key [root@dvn-vm3 ~]# openssl genrsa -out /root/cert/shibtest.dataverse.org.key 2048 diff --git a/doc/sphinx-guides/source/admin/index.rst b/doc/sphinx-guides/source/admin/index.rst index e7dfb0bf46a..28a46cf58e5 100755 --- a/doc/sphinx-guides/source/admin/index.rst +++ b/doc/sphinx-guides/source/admin/index.rst @@ -13,9 +13,8 @@ These "superuser" tasks are managed via the new page called the Dashboard. A use Contents: .. toctree:: - :maxdepth: 2 - harvestclients - harvestserver - metadataexport - timers + harvestclients + harvestserver + metadataexport + timers diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 7bb2805b99a..66ea551d446 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -60,11 +60,11 @@ Multiple File ("bundle") download Returns the files listed, zipped. Parameters: -~~~~~~~~~~ +~~~~~~~~~~~ none. "All Formats" bundled access for Tabular Files. ----------------------------------------------- +----------------------------------------------- ``/api/access/datafile/bundle/$id`` @@ -78,7 +78,7 @@ It returns a zipped bundle that contains the data in the following formats: * File citation, in Endnote and RIS formats. Parameters: -~~~~~~~~~~ +~~~~~~~~~~~ none. Data Variable Metadata Access diff --git a/doc/sphinx-guides/source/api/index.rst b/doc/sphinx-guides/source/api/index.rst index 152ea180236..b9d30d20e91 100755 --- a/doc/sphinx-guides/source/api/index.rst +++ b/doc/sphinx-guides/source/api/index.rst @@ -11,16 +11,15 @@ interoperate with the Dataverse to utilize our APIs. In 4.0, we require to get a token, by simply registering for a Dataverse account, before using our APIs (We are considering making some of the APIs completely public in the future - no token required - if you use it only a few times). -Rather than using a production installation of Dataverse, API users should use http://apitest.dataverse.org for testing. +Rather than using a production installation of Dataverse, API users are welcome to use http://demo.dataverse.org for testing. Contents: .. 
toctree:: - :maxdepth: 2 - sword - search - dataaccess - native-api - client-libraries - apps + sword + search + dataaccess + native-api + client-libraries + apps diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 59af4f00eee..8b686df66cd 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -89,7 +89,7 @@ Publish the Dataverse pointed by ``identifier``, which can either by the dataver Datasets ~~~~~~~~ -**Note** Creation of new datasets is done by ``POST``ing them onto dataverses. See dataverse section. +**Note** Creation of new datasets is done with a ``POST`` onto dataverses. See dataverse section. **Note** In all commands below, dataset versions can be referred to as: @@ -125,12 +125,12 @@ List versions of the dataset:: Show a version of the dataset. The Dataset also include any metadata blocks the data might have:: GET http://$SERVER/api/datasets/$id/versions/$versionNumber?key=$apiKey - - + + Export the metadata of the current published version of a dataset in various formats see Note below:: GET http://$SERVER/api/datasets/export?exporter=ddi&persistentId=$persistentId - + Note: Supported exporters (export formats) are ddi, oai_ddi, dcterms, oai_dc, and dataverse_json. @@ -163,9 +163,9 @@ To revert to the default logic, use ``:publicationDate`` as the ``$datasetFieldT Note that the dataset field used has to be a date field:: PUT http://$SERVER/api/datasets/$id/citationdate?key=$apiKey - + Restores the default logic of the field type to be used as the citation date. Same as ``PUT`` with ``:publicationDate`` body:: - + DELETE http://$SERVER/api/datasets/$id/citationdate?key=$apiKey List all the role assignments at the given dataset:: @@ -185,7 +185,7 @@ Delete a Private URL from a dataset (if it exists):: DELETE http://$SERVER/api/datasets/$id/privateUrl?key=$apiKey Builtin Users -~~~~~ +~~~~~~~~~~~~~ This endopint deals with users of the built-in authentication provider. Note that users may come from other authentication services as well, such as Shibboleth. For this service to work, the setting ``BuiltinUsers.KEY`` has to be set, and its value passed as ``key`` to @@ -368,18 +368,29 @@ Toggles superuser mode on the ``AuthenticatedUser`` whose ``identifier`` (withou POST http://$SERVER/api/admin/superuser/$identifier +List all role assignments of a role assignee (i.e. a user or a group):: + + GET http://$SERVER/api/admin/assignments/assignees/$identifier + +Note that ``identifier`` can contain slashes (e.g. ``&ip/localhost-users``). + IpGroups ^^^^^^^^ -List all the ip groups:: +Lists all the ip groups:: GET http://$SERVER/api/admin/groups/ip -Adds a new ip group. POST data should specify the group in JSON format. Examples are available at ``data/ipGroup1.json``. :: +Adds a new ip group. POST data should specify the group in JSON format. Examples are available at the ``data`` folder. Using this method, an IP Group is always created, but its ``alias`` might be different than the one appearing in the +JSON file, to ensure it is unique. :: POST http://$SERVER/api/admin/groups/ip -Returns a the group in a JSON format. ``groupIdtf`` can either be the group id in the database (in case it is numeric), or the group alias. :: +Creates or updates the ip group ``$groupAlias``. :: + + POST http://$SERVER/api/admin/groups/ip/$groupAlias + +Returns a the group in a JSON format. ``$groupIdtf`` can either be the group id in the database (in case it is numeric), or the group alias. 
:: GET http://$SERVER/api/admin/groups/ip/$groupIdtf diff --git a/doc/sphinx-guides/source/api/search.rst b/doc/sphinx-guides/source/api/search.rst index 84f202a5b83..2f7090a327b 100755 --- a/doc/sphinx-guides/source/api/search.rst +++ b/doc/sphinx-guides/source/api/search.rst @@ -20,11 +20,11 @@ Parameters ============== ======= =========== Name Type Description ============== ======= =========== -q string The search term or terms. Using "title:data" will search only the "title" field. "*" can be used as a wildcard either alone or adjacent to a term (i.e. "bird*"). For example, https://apitest.dataverse.org/api/search?q=title:data -type string Can be either "dataverse", "dataset", or "file". Multiple "type" parameters can be used to include multiple types (i.e. ``type=dataset&type=file``). If omitted, all types will be returned. For example, https://apitest.dataverse.org/api/search?q=*&type=dataset -subtree string The identifier of the dataverse to which the search should be narrowed. The subtree of this dataverse and all its children will be searched. For example, https://apitest.dataverse.org/api/search?q=data&subtree=birds +q string The search term or terms. Using "title:data" will search only the "title" field. "*" can be used as a wildcard either alone or adjacent to a term (i.e. "bird*"). For example, https://demo.dataverse.org/api/search?q=title:data +type string Can be either "dataverse", "dataset", or "file". Multiple "type" parameters can be used to include multiple types (i.e. ``type=dataset&type=file``). If omitted, all types will be returned. For example, https://demo.dataverse.org/api/search?q=*&type=dataset +subtree string The identifier of the dataverse to which the search should be narrowed. The subtree of this dataverse and all its children will be searched. For example, https://demo.dataverse.org/api/search?q=data&subtree=birds sort string The sort field. Supported values include "name" and "date". See example under "order". -order string The order in which to sort. Can either be "asc" or "desc". For example, https://apitest.dataverse.org/api/search?q=data&sort=name&order=asc +order string The order in which to sort. Can either be "asc" or "desc". For example, https://demo.dataverse.org/api/search?q=data&sort=name&order=asc per_page int The number of results to return per request. The default is 10. The max is 1000. See :ref:`iteration example `. start int A cursor for paging through search results. See :ref:`iteration example `. show_relevance boolean Whether or not to show details of which fields were matched by the query. False by default. See :ref:`advanced search example `. @@ -35,7 +35,7 @@ fq string A filter query on the search term. Multiple "fq" parame Basic Search Example -------------------- -https://apitest.dataverse.org/api/search?q=trees +https://demo.dataverse.org/api/search?q=trees .. 
code-block:: json @@ -52,8 +52,8 @@ https://apitest.dataverse.org/api/search?q=trees { "name":"Trees", "type":"dataverse", - "url":"https://apitest.dataverse.org/dataverse/trees", - "image_url":"https://apitest.dataverse.org/api/access/dvCardImage/7", + "url":"https://demo.dataverse.org/dataverse/trees", + "image_url":"https://demo.dataverse.org/api/access/dvCardImage/7", "identifier":"trees", "description":"A tree dataverse with some birds", "published_at":"2016-05-10T12:53:38Z" @@ -61,8 +61,8 @@ https://apitest.dataverse.org/api/search?q=trees { "name":"Chestnut Trees", "type":"dataverse", - "url":"https://apitest.dataverse.org/dataverse/chestnuttrees", - "image_url":"https://apitest.dataverse.org/api/access/dvCardImage/9", + "url":"https://demo.dataverse.org/dataverse/chestnuttrees", + "image_url":"https://demo.dataverse.org/api/access/dvCardImage/9", "identifier":"chestnuttrees", "description":"A dataverse with chestnut trees and an oriole", "published_at":"2016-05-10T12:52:38Z" @@ -70,8 +70,8 @@ https://apitest.dataverse.org/api/search?q=trees { "name":"trees.png", "type":"file", - "url":"https://apitest.dataverse.org/api/access/datafile/12", - "image_url":"https://apitest.dataverse.org/api/access/fileCardImage/12", + "url":"https://demo.dataverse.org/api/access/datafile/12", + "image_url":"https://demo.dataverse.org/api/access/fileCardImage/12", "file_id":"12", "description":"", "published_at":"2016-05-10T12:53:39Z", @@ -84,8 +84,8 @@ https://apitest.dataverse.org/api/search?q=trees { "name":"Birds", "type":"dataverse", - "url":"https://apitest.dataverse.org/dataverse/birds", - "image_url":"https://apitest.dataverse.org/api/access/dvCardImage/2", + "url":"https://demo.dataverse.org/dataverse/birds", + "image_url":"https://demo.dataverse.org/api/access/dvCardImage/2", "identifier":"birds", "description":"A bird dataverse with some trees", "published_at":"2016-05-10T12:57:27Z" @@ -100,7 +100,7 @@ https://apitest.dataverse.org/api/search?q=trees Advanced Search Example ----------------------- -https://apitest.dataverse.org/api/search?q=finch&show_relevance=true&show_facets=true&fq=publicationDate:2016&subtree=birds +https://demo.dataverse.org/api/search?q=finch&show_relevance=true&show_facets=true&fq=publicationDate:2016&subtree=birds In this example, ``show_relevance=true`` matches per field are shown. Available facets are shown with ``show_facets=true`` and of the facets is being used with ``fq=publication_date_s:2015``. The search is being narrowed to the dataverse with the identifier "birds" with the parameter ``subtree=birds``. @@ -118,8 +118,8 @@ In this example, ``show_relevance=true`` matches per field are shown. Available { "name":"Finches", "type":"dataverse", - "url":"https://apitest.dataverse.org/dataverse/finches", - "image_url":"https://apitest.dataverse.org/api/access/dvCardImage/3", + "url":"https://demo.dataverse.org/dataverse/finches", + "image_url":"https://demo.dataverse.org/api/access/dvCardImage/3", "identifier":"finches", "description":"A dataverse with finches", "published_at":"2016-05-10T12:57:38Z", @@ -145,7 +145,7 @@ In this example, ``show_relevance=true`` matches per field are shown. 
Available "name":"Darwin's Finches", "type":"dataset", "url":"http://dx.doi.org/10.5072/FK2/G2VPE7", - "image_url":"https://apitest.dataverse.org/api/access/dsCardImage/2", + "image_url":"https://demo.dataverse.org/api/access/dsCardImage/2", "global_id":"doi:10.5072/FK2/G2VPE7", "description": "Darwin's finches (also known as the Galápagos finches) are a group of about fifteen species of passerine birds.", "published_at":"2016-05-10T12:57:45Z", @@ -224,7 +224,7 @@ Be default, up to 10 results are returned with every request (though this can be #!/usr/bin/env python import urllib2 import json - base = 'https://apitest.dataverse.org' + base = 'https://demo.dataverse.org' rows = 10 start = 0 page = 1 diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index e9c09e0c61b..ca7b40d4dc6 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -14,6 +14,7 @@ import sys import os +from datetime import datetime sys.path.insert(0, os.path.abspath('../../')) import sphinx_bootstrap_theme @@ -56,16 +57,16 @@ # General information about the project. project = u'Dataverse' -copyright = u'2016, The President & Fellows of Harvard College' +copyright = u'%d, The President & Fellows of Harvard College' % datetime.now().year # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = '4.5' +version = '4.5.1' # The full version, including alpha/beta/rc tags. -release = '4.5' +release = '4.5.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -358,7 +359,7 @@ epub_title = u'Dataverse' epub_author = u'Dataverse Team' epub_publisher = u'Dataverse Team' -epub_copyright = u'2014, Dataverse Team' +epub_copyright = u'%d, The President & Fellows of Harvard College' % datetime.now().year # The basename for the epub file. It defaults to the project name. #epub_basename = u'Consilience Documentation' diff --git a/doc/sphinx-guides/source/developers/dev-environment.rst b/doc/sphinx-guides/source/developers/dev-environment.rst index d3871380f52..532f2f75d7a 100755 --- a/doc/sphinx-guides/source/developers/dev-environment.rst +++ b/doc/sphinx-guides/source/developers/dev-environment.rst @@ -7,7 +7,7 @@ Development Environment Assumptions ----------- -This guide assumes you are using a Mac but we do have pages for :doc:`/developers/windows` and :doc:`/developers/ubuntu`. +This guide assumes you are using a Mac. If you are using Windows or Linux, please reach out to other developers at https://groups.google.com/forum/#!forum/dataverse-dev Requirements ------------ diff --git a/doc/sphinx-guides/source/developers/index.rst b/doc/sphinx-guides/source/developers/index.rst index 9225d00dcd6..a9daa12b16c 100755 --- a/doc/sphinx-guides/source/developers/index.rst +++ b/doc/sphinx-guides/source/developers/index.rst @@ -9,17 +9,14 @@ Developer Guide Contents: .. 
toctree:: - :maxdepth: 2 - - intro - dev-environment - branching-strategy - testing - documentation - debugging - coding-style - making-releases - tools - unf/index - + intro + dev-environment + branching-strategy + testing + documentation + debugging + coding-style + making-releases + tools + unf/index diff --git a/doc/sphinx-guides/source/developers/ubuntu.rst b/doc/sphinx-guides/source/developers/ubuntu.rst deleted file mode 100755 index 9204a6171eb..00000000000 --- a/doc/sphinx-guides/source/developers/ubuntu.rst +++ /dev/null @@ -1,51 +0,0 @@ -====== -Ubuntu -====== - -Requirements ------------- - -Tested on Ubuntu 14.04. - -Java 8 -~~~~~~ - -- ``sudo apt-get install openjdk-8-jdk openjdk-8-jre`` - - -Maven -~~~~~ - -- ``sudo apt-get install maven`` - - -Glassfish -~~~~~~~~~ - -- ``wget http://download.java.net/glassfish/4.1/release/glassfish-4.1.zip`` - -- ``unzip glassfish-4.1*zip`` - - -PostgreSQL -~~~~~~~~~~ - -- ``sudo apt-get install postgresql postgresql-contrib`` - - -jq -~~ - -- ``sudo apt-get install jq`` - - -Curl -~~~~ - -- ``sudo apt-get install curl`` - - -Recommendations and Dev Environment -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Please visit :doc:`/developers/dev-environment/` diff --git a/doc/sphinx-guides/source/developers/unf/index.rst b/doc/sphinx-guides/source/developers/unf/index.rst index 3980909fc0b..dc2f37d0ba9 100644 --- a/doc/sphinx-guides/source/developers/unf/index.rst +++ b/doc/sphinx-guides/source/developers/unf/index.rst @@ -1,17 +1,17 @@ .. _unf: -==================================== +===================================== Universal Numerical Fingerprint (UNF) -==================================== +===================================== Contents: .. toctree:: :maxdepth: 2 - unf-v3 - unf-v5 - unf-v6 + unf-v3 + unf-v5 + unf-v6 .. figure:: ./img/unf-diagram.png :align: center diff --git a/doc/sphinx-guides/source/developers/unf/unf-v5.rst b/doc/sphinx-guides/source/developers/unf/unf-v5.rst index 8606ff06ebc..4fb160c20ea 100644 --- a/doc/sphinx-guides/source/developers/unf/unf-v5.rst +++ b/doc/sphinx-guides/source/developers/unf/unf-v5.rst @@ -10,6 +10,3 @@ UNF Version 5 **To address this, the Project is about to release UNF Version 6. The release date is still being discussed. It may coincide with the release of Dataverse 4.0. Alternatively, the production version of DVN 3.6.3 may get upgraded to use UNF v6 prior to that. This will be announced shortly. In the process, we are solving another problem with UNF v5 - this time we've made an effort to offer very implementer-friendly documentation that describes the algorithm fully and unambiguously. So if you are interested in implementing your own version of a UNF calculator, (something we would like to encourage!) 
please proceed directly to the Version 6 documentation.** **Going forward, we are going to offer a preserved version of the Version 5 library and, possibly, an online UNF v5 calculator, for the purposes of validating vectors and data sets for which published Version 5 UNFs exist.** - ------ - diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst deleted file mode 100755 index 9ed6f10dcd5..00000000000 --- a/doc/sphinx-guides/source/developers/windows.rst +++ /dev/null @@ -1,5 +0,0 @@ -======= -Windows -======= - -Developers using Windows who have trouble setting up their :doc:`/developers/dev-environment/` should reach out to over Dataverse developers per https://github.com/IQSS/dataverse/blob/master/CONTRIBUTING.md diff --git a/doc/sphinx-guides/source/index.rst b/doc/sphinx-guides/source/index.rst index 1c3a88dcdef..8fd7056c032 100755 --- a/doc/sphinx-guides/source/index.rst +++ b/doc/sphinx-guides/source/index.rst @@ -3,10 +3,10 @@ You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Dataverse 4.5 Guides +Dataverse 4.5.1 Guides ====================== -These guides are for the most recent version of Dataverse. For the guides for **version 4.3.1** please go `here `_. +These guides are for the most recent version of Dataverse. For the guides for **version 4.5** please go `here `_. .. toctree:: :glob: diff --git a/doc/sphinx-guides/source/installation/administration.rst b/doc/sphinx-guides/source/installation/administration.rst index 6c665cbb650..59cf11652a3 100644 --- a/doc/sphinx-guides/source/installation/administration.rst +++ b/doc/sphinx-guides/source/installation/administration.rst @@ -72,6 +72,17 @@ User Administration There isn't much in the way of user administration tools built in to Dataverse. +Confirm Email ++++++++++++++ + +Dataverse encourages builtin/local users to verify their email address upon signup or email change so that sysadmins can be assured that users can be contacted. + +The app will send a standard welcome email with a URL the user can click, which, when activated, will store a ``lastconfirmed`` timestamp in the ``authenticateduser`` table of the database. Any time this is "null" for a user (immediately after signup and/or changing of their Dataverse email address), their current email on file is considered to not be verified. The link that is sent expires after a time (the default is 24 hours), but this is configurable by a superuser via the ``:MinutesUntilConfirmEmailTokenExpires`` config option. + +Should users' URL token expire, they will see a "Verify Email" button on the account information page to send another URL. + +Sysadmins can determine which users have verified their email addresses by looking for the presence of the value ``emailLastConfirmed`` in the JSON output from listing users (see the "Admin" section of the :doc:`/api/native-api`). The email addresses for Shibboleth users are re-confirmed on every login. 
+ Deleting an API Token +++++++++++++++++++++ diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 142e69ff3e3..2ebed65457f 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -90,7 +90,7 @@ Persistent identifiers are a required and integral part of the Dataverse platfor JVM Options: :ref:`doi.baseurlstring`, :ref:`doi.username`, :ref:`doi.password` -Database Settings: :ref:`:DoiProvider`, :ref:`:Protocol`, :ref:`:Authority`, :ref:`:DoiSeparator` +Database Settings: :ref:`:DoiProvider <:DoiProvider>`, :ref:`:Protocol <:Protocol>`, :ref:`:Authority <:Authority>`, :ref:`:DoiSeparator <:DoiSeparator>` Please note that any datasets creating using the test configuration cannot be directly migrated and would need to be created again once a valid DOI namespace is configured. @@ -104,8 +104,9 @@ Once this configuration is complete, your Dataverse installation should be ready JVM Options ----------- -JVM stands Java Virtual Machine and as a Java application, Glassfish can read JVM options when it is started. A number of JVM options are configured by the installer below is a complete list of the Dataverse-specific JVM options. You can inspect the configured options by running ``asadmin list-jvm-options | egrep 'dataverse|doi' -``. +JVM stands Java Virtual Machine and as a Java application, Glassfish can read JVM options when it is started. A number of JVM options are configured by the installer below is a complete list of the Dataverse-specific JVM options. You can inspect the configured options by running: + +``asadmin list-jvm-options | egrep 'dataverse|doi'`` When changing values these values with ``asadmin``, you'll need to delete the old value before adding a new one, like this: @@ -188,9 +189,11 @@ dataverse.dataAccess.thumbnail.pdf.limit For limiting the size of thumbnail images generated from files. +.. _doi.baseurlstring: + doi.baseurlstring +++++++++++++++++ -.. _doi.baseurlstring: + As of this writing "https://ezid.cdlib.org" and "https://mds.datacite.org" are the only valid values. See also these related database settings below: - :DoiProvider @@ -198,14 +201,18 @@ As of this writing "https://ezid.cdlib.org" and "https://mds.datacite.org" are t - :Authority - :DoiSeparator +.. _doi.username: + doi.username ++++++++++++ -.. _doi.username: + Used in conjuction with ``doi.baseurlstring``. +.. _doi.password: + doi.password ++++++++++++ -.. _doi.password: + Used in conjuction with ``doi.baseurlstring``. dataverse.handlenet.admcredfile @@ -265,30 +272,45 @@ This is the email address that "system" emails are sent from such as password re ``curl -X PUT -d "Support " http://localhost:8080/api/admin/settings/:SystemEmail`` +:FooterCopyright +++++++++++++++++ + +By default the footer says "Copyright © [YYYY]" but you can add text after the year, as in the example below. + +``curl -X PUT -d ", The President & Fellows of Harvard College" http://localhost:8080/api/admin/settings/:FooterCopyright`` + +.. _:DoiProvider: + :DoiProvider ++++++++++++ -.. _:DoiProvider: + As of this writing "EZID" and "DataCite" are the only valid options. ``curl -X PUT -d EZID http://localhost:8080/api/admin/settings/:DoiProvider`` +.. _:Protocol: + :Protocol +++++++++ -.. _:Protocol: + As of this writing "doi" is the only valid option for the protocol for a persistent ID. ``curl -X PUT -d doi http://localhost:8080/api/admin/settings/:Protocol`` +.. _:Authority: + :Authority ++++++++++ -.. 
_:Authority: + Use the DOI authority assigned to you by your DoiProvider. ``curl -X PUT -d 10.xxxx http://localhost:8080/api/admin/settings/:Authority`` +.. _:DoiSeparator: + :DoiSeparator +++++++++++++ -.. _:DoiSeparator: + It is recommended that you keep this as a slash ("/"). ``curl -X PUT -d "/" http://localhost:8080/api/admin/settings/:DoiSeparator`` @@ -370,7 +392,9 @@ Limit the number of files in a zip that Dataverse will accept. :GoogleAnalyticsCode ++++++++++++++++++++ -For setting up Google Analytics for your Dataverse installation. +Set your Google Analytics Tracking ID thusly: + +``curl -X PUT -d 'trackingID' http://localhost:8080/api/admin/settings/:GoogleAnalyticsCode`` :SolrHostColonPort ++++++++++++++++++ @@ -392,7 +416,7 @@ The relative path URL to which users will be sent after signup. The default sett The location of your TwoRavens installation. Activation of TwoRavens also requires the setting below, ``TwoRavensTabularView`` :TwoRavensTabularView -+++++++++++++++++++ ++++++++++++++++++++++ Set ``TwoRavensTabularView`` to true to allow a user to view tabular files via the TwoRavens application. This boolean affects whether a user will see the "Explore" button. @@ -445,6 +469,11 @@ Set ``SearchHighlightFragmentSize`` to override the default value of 100 from ht Allow for migration of non-conformant data (especially dates) from DVN 3.x to Dataverse 4. +:MinutesUntilConfirmEmailTokenExpires ++++++++++++++++++++++++++++++++++++++ + +The duration in minutes before "Confirm Email" URLs expire. The default is 1440 minutes (24 hours). See also :doc:`/installation/administration`. + :ShibEnabled ++++++++++++ @@ -454,3 +483,21 @@ This setting is experimental per :doc:`/installation/shibboleth`. ++++++++++++ Set to false to disallow local accounts to be created if you are using :doc:`shibboleth` but not for production use until https://github.com/IQSS/dataverse/issues/2838 has been fixed. + +:PiwikAnalyticsId +++++++++++++++++++++ + +Site identifier created in your Piwik instance. Example: + +``curl -X PUT -d 42 http://localhost:8080/api/admin/settings/:PiwikAnalyticsId`` + +:PiwikAnalyticsHost +++++++++++++++++++++ + +Host FQDN or URL of your Piwik instance before the ``/piwik.php``. Examples: + +``curl -X PUT -d stats.domain.tld http://localhost:8080/api/admin/settings/:PiwikAnalyticsHost`` + +or + +``curl -X PUT -d hostname.domain.tld/stats http://localhost:8080/api/admin/settings/:PiwikAnalyticsHost`` diff --git a/doc/sphinx-guides/source/installation/index.rst b/doc/sphinx-guides/source/installation/index.rst index ba2992c5ec4..b418370f908 100755 --- a/doc/sphinx-guides/source/installation/index.rst +++ b/doc/sphinx-guides/source/installation/index.rst @@ -9,15 +9,13 @@ Installation Guide Contents: .. toctree:: - :titlesonly: - :maxdepth: 2 - intro - prep - prerequisites - installation-main - config - administration - upgrading - r-rapache-tworavens - shibboleth + intro + prep + prerequisites + installation-main + config + administration + upgrading + r-rapache-tworavens + shibboleth diff --git a/doc/sphinx-guides/source/installation/installer-script.rst b/doc/sphinx-guides/source/installation/installer-script.rst deleted file mode 100644 index 72881587917..00000000000 --- a/doc/sphinx-guides/source/installation/installer-script.rst +++ /dev/null @@ -1 +0,0 @@ -This content has been moved to :doc:`/installation/installation-main`. 
diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst index 9530860dce9..4a30c5b9a12 100644 --- a/doc/sphinx-guides/source/installation/prerequisites.rst +++ b/doc/sphinx-guides/source/installation/prerequisites.rst @@ -98,7 +98,7 @@ The standard init script that ships RHEL 6 and similar should work fine. Enable Configuring Database Access for the Dataverse Application (and the Dataverse Installer) -===================================================================================== +======================================================================================= - The application and the installer script will be connecting to PostgreSQL over TCP/IP, using password authentication. In this section we explain how to configure PostgreSQL to accept these connections. diff --git a/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst b/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst index 44cd29570b9..a88ffa114d2 100644 --- a/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst +++ b/doc/sphinx-guides/source/installation/r-rapache-tworavens.rst @@ -5,8 +5,8 @@ R, rApache and TwoRavens Eventually, this document may be split into several parts, dedicated to individual components - such as R, rApache and the TwoRavens applications. Particularly, if the TwoRavens team creates an "official" distribution with their own installation manual. -0. PREREQUISITS -+++++++++++++++ +0. PREREQUISITES +++++++++++++++++ a. httpd (Apache): ------------------ @@ -43,6 +43,13 @@ yum install R R-devel (EPEL distribution recommended; version 3.* required; 3.1.* recommended as of writing this) +To pick up any needed dependencies, CentOS users may simply install the epel-release RPM. + +RHEL users will want to log in to their organization's respective RHN interface, find the particular machine in question and: + +• click on "Subscribed Channels: Alter Channel Subscriptions" +• enable EPEL, Server Extras, Server Optional + c. rApache: ----------- @@ -57,19 +64,14 @@ If you are using RHEL/CentOS 7, you can `download an experimental rapache-1.2.7- rpm -ivh rapache-1.2.7-rpm0.x86_64.rpm -d. Install libcurl-devel: -------------------------- - -(provides /usr/bin/curl-config, needed by some 3rd-party R packages; package installation *will fail silently* if it's not found!): - -``yum install libcurl-devel`` +d. Install system depencies: +---------------------------- -Make sure you have the standard GNU compilers installed (needed for 3rd-party R packages to build themselves). +The r-setup.sh script launches child processes which log to RINSTALL.* files. Once the script exits, search these files for the word "error" and be sure to install any missing dependencies and run the script again. At present, at minimum it needs: -**Update**: As of Aug. 4 2015, it appears the following rpms had to be installed: +``yum install libcurl-devel openssl-devel libxml2-devel ed libX11-devel libpng-devel mesa-libGL-devel mesa-libGLU-devel libpqxx-devel`` -``yum install openssl-devel`` -``yum install xml2-devel`` +Make sure you have the standard GNU compilers installed (needed for 3rd-party R packages to build themselves). CentOS 6 users will need gcc-fortran 4.6 or greater, available from the CentOS devtools repo. Again, without these rpms, R package devtools was failing to install, silently or with a non-informative error message. 
Note: this package ``devtools`` has proven to be very flaky; it is being very actively maintained, new dependencies are being constantly added and new bugs introduced... however, it is only needed to install the package ``Zelig``, the main R workhorse behind TwoRavens. It cannot be installed from CRAN, like all the other 3rd party packages we use - becase TwoRavens requires version 5, which is still in beta. So devtools is needed to build it from sources downloaded directly from github. Once Zelig 5 is released, we'll be able to drop the requirement for devtools - and that will make this process much simpler. For now, be prepared for it to be somewhat of an adventure. @@ -82,7 +84,7 @@ R is used both by the Dataverse application, directly, and the TwoRavens compani Two distinct interfaces are used to access R: Dataverse uses Rserve; and TwoRavens sends jobs to R running under rApache using Rook interface. -We provide a shell script (``conf/R/r-setup.sh`` in the Dataverse source tree; you will need the other 3 files in that directory as well - `https://github.com/IQSS/dataverse/conf/R/ `__) that will attempt to install the required 3rd party packages; it will also configure Rserve and rserve user. rApache configuration will be addressed in its own section. +We provide a shell script (``conf/R/r-setup.sh`` in the Dataverse source tree; you will need the other 3 files in that directory as well - `https://github.com/IQSS/dataverse/tree/master/conf/R `__) that will attempt to install the required 3rd party packages; it will also configure Rserve and rserve user. rApache configuration will be addressed in its own section. The script will attempt to download the packages from CRAN (or a mirror) and GitHub, so the system must have access to the internet. On a server fully firewalled from the world, packages can be installed from downloaded sources. This is left as an exercise for the reader. Consult the script for insight. @@ -192,7 +194,7 @@ Note that some of these packages have their own dependencies, and additional ins install.pl script: ++++++++++++++++++ -I. Configure the TwoRavens web (Javascript) application. +I. Configure the TwoRavens web (Javascript) application ------------------------------------------------------- Edit the file ``/var/www/html/dataexplore/app_ddi.js``. diff --git a/doc/sphinx-guides/source/user/data-exploration/index.rst b/doc/sphinx-guides/source/user/data-exploration/index.rst index b6be872249c..708f774bb46 100755 --- a/doc/sphinx-guides/source/user/data-exploration/index.rst +++ b/doc/sphinx-guides/source/user/data-exploration/index.rst @@ -9,10 +9,6 @@ Data Exploration Guide Contents: .. toctree:: - :titlesonly: - :maxdepth: 2 - - tworavens - worldmap - + tworavens + worldmap diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 0a6200583e5..d0259286af9 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -89,8 +89,8 @@ For example, if these files were included within a .zip, the “Map Data” butt * subway_line.dbf Once you publish your dataset with your shape files, you will be able to use the "Map Data" button using `GeoConnect `_ to visualize and manipulate these files -for users to Explore this geospatial data using the `WorldMap `_ interface. -Please note: In order to map your data file, a copy will be sent to Harvard's `WorldMap `_ platform. 
You have the ability to delete any maps, and associated data, from the Harvard WorldMap platform, at any time. +for users to Explore this geospatial data using the `WorldMap `__ interface. +Please note: In order to map your data file, a copy will be sent to Harvard's `WorldMap `__ platform. You have the ability to delete any maps, and associated data, from the Harvard WorldMap platform, at any time. Astronomy (FITS) -------------------- @@ -222,7 +222,7 @@ The file permissions page has two sections: Users/Groups and Files. To give someone access to your restricted files, click on the Grant Access to Users/Groups button in the Users/Groups section. -.. _widgets: +.. _dataset-widgets: Widgets ============================= @@ -310,7 +310,8 @@ a file, your dataset will automatically be bumped up to a major version (example |image3| -**Dataset Versions Tab** +Version Details +------------------------------------- To view what has exactly changed starting from the originally published version to any subsequent published versions: click on the Versions tab on the dataset page to see all versions and changes made for that particular dataset. Once you have more than one version (can be version 1 and a draft), you can click the Show Details link in the Versions tab to learn more about the metadata fields and files that were either added or edited. @@ -334,6 +335,6 @@ If you deaccession the most recently published version of the dataset but not al :class: img-responsive .. |image2| image:: ./img/data-download.png :class: img-responsive -.. |image3| image:: http://static.projects.iq.harvard.edu/files/styles/os_files_xxlarge/public/datascience/files/data_publishing_version_workflow.png?itok=8Z0PM-QC +.. |image3| image:: ./img/data_publishing_version_workflow.png :class: img-responsive diff --git a/doc/sphinx-guides/source/user/dataverse-management.rst b/doc/sphinx-guides/source/user/dataverse-management.rst index 6d45a055ecf..bac42305119 100755 --- a/doc/sphinx-guides/source/user/dataverse-management.rst +++ b/doc/sphinx-guides/source/user/dataverse-management.rst @@ -35,12 +35,13 @@ Edit Dataverse To edit your dataverse, navigate to your dataverse homepage and select the "Edit Dataverse" button, where you will be presented with the following editing options: -- :ref:`General Information ` : edit name, identifier, category, contact email, affiliation, description, Metadata Elements, and facets for your dataverse. -- :ref:`Theme + Widgets ` : upload a logo for your dataverse, add a link to your department or personal website, and select colors for your dataverse in order to brand it. Also, you can get code to add to your website to have your dataverse display on it. 
-- :ref:`Permissions ` : give Dataverse users permissions to your dataverse, i.e.-can edit datasets, and see which users already have which permissions for your dataverse -- :ref:`Dataset Templates ` : these are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in -- :ref:`Dataset Guestbooks ` : allows you to collect data about who is downloading the files from your datasets -- :ref:`Featured Dataverses ` : if you have one or more dataverses, you can use this option to show them at the top of your dataverse page to help others easily find interesting or important dataverses +- :ref:`General Information `: edit name, identifier, category, contact email, affiliation, description, Metadata Elements, and facets for your dataverse +- :ref:`Theme `: upload a logo for your dataverse, add a link to your department or personal website, and select colors for your dataverse in order to brand it +- :ref:`Widgets `: get code to add to your website to have your dataverse display on it +- :ref:`Permissions `: give Dataverse users permissions to your dataverse, i.e.-can edit datasets, and see which users already have which permissions for your dataverse +- :ref:`Dataset Templates `: these are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in +- :ref:`Dataset Guestbooks `: allows you to collect data about who is downloading the files from your datasets +- :ref:`Featured Dataverses `: if you have one or more dataverses, you can use this option to show them at the top of your dataverse page to help others easily find interesting or important dataverses - **Delete Dataverse**: you are able to delete your dataverse as long as it is not published and does not have any draft datasets .. _general-information: @@ -52,14 +53,14 @@ The General Information page is how you edit the information you filled in while Tip: The metadata fields you select as required, will appear on the Create Dataset form when someone goes to add a dataset to the dataverse. -.. _widgets: +.. _theme: Theme ==================================================== The Theme feature provides you with a way to customize the look of your dataverse. You can decide either to use the customization from the dataverse above yours or upload your own image file. Supported image types are JPEG, TIFF, or PNG and should be no larger than 500 KB. The maximum display size for an image file in a dataverse's theme is 940 pixels wide by 120 pixels high. Additionally, you can select the colors for the header of your dataverse and the text that appears in your dataverse. You can also add a link to your personal website, the website for your organization or institution, your department, journal, etc. -.. _widgets: +.. 
_dataverse-widgets: Widgets ================================================= diff --git a/doc/sphinx-guides/source/user/img/data_publishing_version_workflow.png b/doc/sphinx-guides/source/user/img/data_publishing_version_workflow.png new file mode 100644 index 00000000000..6ef11f31750 Binary files /dev/null and b/doc/sphinx-guides/source/user/img/data_publishing_version_workflow.png differ diff --git a/doc/sphinx-guides/source/user/index.rst b/doc/sphinx-guides/source/user/index.rst index 604ad4c2071..9d231cb5f6d 100755 --- a/doc/sphinx-guides/source/user/index.rst +++ b/doc/sphinx-guides/source/user/index.rst @@ -9,12 +9,11 @@ User Guide Contents: .. toctree:: - :maxdepth: 2 - account - find-use-data - dataverse-management - dataset-management - tabulardataingest/index - data-exploration/index - appendix + account + find-use-data + dataverse-management + dataset-management + tabulardataingest/index + data-exploration/index + appendix diff --git a/doc/sphinx-guides/source/user/super-user.rst b/doc/sphinx-guides/source/user/super-user.rst deleted file mode 100755 index 3586d0ea827..00000000000 --- a/doc/sphinx-guides/source/user/super-user.rst +++ /dev/null @@ -1,6 +0,0 @@ -Super User -+++++++++++++++++++++++ - -[Note: Documentation to be added about features available for super admins of -the Dataverse, which provides several options for configuring and -customizing your application.] diff --git a/doc/sphinx-guides/source/user/tabulardataingest/index.rst b/doc/sphinx-guides/source/user/tabulardataingest/index.rst index 0ca316502e7..a190710bdab 100755 --- a/doc/sphinx-guides/source/user/tabulardataingest/index.rst +++ b/doc/sphinx-guides/source/user/tabulardataingest/index.rst @@ -9,15 +9,11 @@ Tabular Data File Ingest Contents: .. toctree:: - :titlesonly: - :maxdepth: 2 - - supportedformats - ingestprocess - spss - stata - rdata - excel - csv - + supportedformats + ingestprocess + spss + stata + rdata + excel + csv diff --git a/doc/sphinx-guides/source/user/tabulardataingest/rdata.rst b/doc/sphinx-guides/source/user/tabulardataingest/rdata.rst index b5d9311d603..ae2cc6cf7fe 100644 --- a/doc/sphinx-guides/source/user/tabulardataingest/rdata.rst +++ b/doc/sphinx-guides/source/user/tabulardataingest/rdata.rst @@ -92,10 +92,10 @@ the latter reserved for longer, descriptive text. With variables ingested from R data frames the variable name will be used for both the "name" and the "label". -| *Optional R packages exist for providing descriptive variable labels; - in one of the future versions support may be added for such a - mechanism. It would of course work only for R files that were - created with such optional packages*. +*Optional R packages exist for providing descriptive variable labels; +in one of the future versions support may be added for such a +mechanism. It would of course work only for R files that were +created with such optional packages*. Similarly, R categorical values (factors) lack descriptive labels too. **Note:** This is potentially confusing, since R factors do @@ -132,7 +132,7 @@ value: unless the time zone was explicitly defined, R will adjust the value to the current time zone. 
The resulting behavior is often counter-intuitive: if you create a time value, for example: - timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS"); +``timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS");`` on a computer configured for the San Francisco time zone, the value will be differently displayed on computers in different time zones; @@ -143,9 +143,11 @@ If it is important that the values are always displayed the same way, regardless of the current time zones, it is recommended that the time zone is explicitly defined. For example: - attr(timevalue,"tzone")<-"PST" +``attr(timevalue,"tzone")<-"PST"`` + or - timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS", tz="PST"); + +``timevalue<-as.POSIXct("03/19/2013 12:57:00", format = "%m/%d/%Y %H:%M:%OS", tz="PST");`` Now the value will always be displayed as "15:57 PST", regardless of the time zone that is current for the OS ... **BUT ONLY** if the OS @@ -189,7 +191,7 @@ wasn't defined explicitly, it implicitly becomes a time value in the "UTC" zone!), this means that it is **impossible** to have 2 time value vectors, in Stata/SPSS and R, that produce the same UNF. -| **A pro tip:** if it is important to produce SPSS/Stata and R versions of +**A pro tip:** if it is important to produce SPSS/Stata and R versions of the same data set that result in the same UNF when ingested, you may define the time variables as **strings** in the R data frame, and use the "YYYY-MM-DD HH:mm:ss" formatting notation. This is the formatting used by the UNF @@ -198,4 +200,4 @@ the same UNF as the vector of the same time values in Stata. Note: date values (dates only, without time) should be handled the exact same way as those in SPSS and Stata, and should produce the same -UNFs. \ No newline at end of file +UNFs. diff --git a/doc/sphinx-guides/source/user/tabulardataingest/stata.rst b/doc/sphinx-guides/source/user/tabulardataingest/stata.rst new file mode 100644 index 00000000000..764bc815a2f --- /dev/null +++ b/doc/sphinx-guides/source/user/tabulardataingest/stata.rst @@ -0,0 +1,9 @@ +Stata +++++++++ + +Of all the third party statistical software providers, Stata does the best job at documenting the internal format of their files, by far. And at making that documentation freely and easily available to developers (yes, we are looking at you, SPSS). Because of that, Stata is the best supported format for tabular data ingest. + + +**New in Dataverse 4.0:** support for Stata v.13 has been added. + + diff --git a/doc/sphinx_bootstrap_theme/bootstrap/layout.html b/doc/sphinx_bootstrap_theme/bootstrap/layout.html index 3478e807b30..2193b142bb9 100755 --- a/doc/sphinx_bootstrap_theme/bootstrap/layout.html +++ b/doc/sphinx_bootstrap_theme/bootstrap/layout.html @@ -133,7 +133,7 @@ {%- if hasdoc('copyright') %} {% trans path=pathto('copyright'), copyright=copyright|e %}© Copyright {{ copyright }}.{% endtrans %}
{%- else %}
- {% trans copyright=copyright|e %}Data Science at The Institute for Quantitative Social Science  |  Code available at Dataverse on GitHub  |  Created using Sphinx {{ sphinx_version }}
Version {{ version }}  |  Last updated on {{ last_updated }}
© Copyright {{ copyright }} {% endtrans %}
+ {% trans copyright=copyright|e %}Data Science at The Institute for Quantitative Social Science  |  Code available at Dataverse on GitHub  |  Created using Sphinx {{ sphinx_version }}
Version {{ version }}  |  Last updated on {{ last_updated }}
© Copyright {{ copyright }} {% endtrans %}
{%- endif %}
{%- endif %}

diff --git a/doc/sphinx_bootstrap_theme/bootstrap/static/images/githubicon.png b/doc/sphinx_bootstrap_theme/bootstrap/static/images/githubicon.png index 468574c26a9..65581c832f1 100644 Binary files a/doc/sphinx_bootstrap_theme/bootstrap/static/images/githubicon.png and b/doc/sphinx_bootstrap_theme/bootstrap/static/images/githubicon.png differ diff --git a/pom.xml b/pom.xml index 56edef4f9d8..7812b866812 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ edu.harvard.iq dataverse - 4.5 + 4.5.1 war dataverse @@ -41,7 +41,7 @@ dataone.org - http://dev-testing.dataone.org/maven + http://maven.dataone.org true diff --git a/scripts/api/data/ipGroup-all-ipv4.json b/scripts/api/data/ipGroup-all-ipv4.json new file mode 100644 index 00000000000..c5ff32def44 --- /dev/null +++ b/scripts/api/data/ipGroup-all-ipv4.json @@ -0,0 +1,5 @@ +{ + "alias":"all-ipv4", + "name":"IP group to match all IPv4 addresses", + "ranges" : [["0.0.0.0", "255.255.255.255"]] +} diff --git a/scripts/api/data/ipGroup3.json b/scripts/api/data/ipGroup-all.json similarity index 100% rename from scripts/api/data/ipGroup3.json rename to scripts/api/data/ipGroup-all.json diff --git a/scripts/api/data/ipGroup-localhost.json b/scripts/api/data/ipGroup-localhost.json index 4a5f7facfef..4f8d2f708b2 100644 --- a/scripts/api/data/ipGroup-localhost.json +++ b/scripts/api/data/ipGroup-localhost.json @@ -1,6 +1,5 @@ { "alias":"localhost", "name":"Localhost connections", - "ranges" : [["127.0.0.1", "127.0.0.1"], - ["::1", "::1"]] + "addresses": [ "::1", "127.0.0.1" ] } diff --git a/scripts/api/data/ipGroup-single-IPv4.json b/scripts/api/data/ipGroup-single-IPv4.json new file mode 100644 index 00000000000..515c512bcd1 --- /dev/null +++ b/scripts/api/data/ipGroup-single-IPv4.json @@ -0,0 +1,5 @@ +{ + "alias":"singleIPv4", + "name":"Single IPv4", + "addresses" : ["128.0.0.7"] +} diff --git a/scripts/api/data/ipGroup-single-IPv6.json b/scripts/api/data/ipGroup-single-IPv6.json new file mode 100644 index 00000000000..73eaa8e60a1 --- /dev/null +++ b/scripts/api/data/ipGroup-single-IPv6.json @@ -0,0 +1,5 @@ +{ + "alias":"singleIPv6", + "name":"Single IPv6", + "addresses" : ["aa:bb:cc:dd:ee:ff::1"] +} diff --git a/scripts/api/data/ipGroupDuplicate-v1.json b/scripts/api/data/ipGroupDuplicate-v1.json new file mode 100644 index 00000000000..eda0c8eb49b --- /dev/null +++ b/scripts/api/data/ipGroupDuplicate-v1.json @@ -0,0 +1,7 @@ +{ + "alias":"ipGroup-dup", + "name":"IP Group with duplicate files (1)", + "description":"This is the FIRST version of the group", + "ranges" : [["60.0.0.0", "60.0.0.255"], + ["60::1", "60::ffff"]] +} diff --git a/scripts/api/data/ipGroupDuplicate-v2.json b/scripts/api/data/ipGroupDuplicate-v2.json new file mode 100644 index 00000000000..8db88e97fe7 --- /dev/null +++ b/scripts/api/data/ipGroupDuplicate-v2.json @@ -0,0 +1,7 @@ +{ + "alias":"ipGroup-dup", + "name":"IP Group with duplicate files-v2", + "description":"This is the second version of the group", + "ranges" : [["70.0.0.0", "70.0.0.255"], + ["70::1", "70::ffff"]] +} diff --git a/scripts/api/setup-optional-harvard.sh b/scripts/api/setup-optional-harvard.sh index 52caa31c1e0..3433e823014 100755 --- a/scripts/api/setup-optional-harvard.sh +++ b/scripts/api/setup-optional-harvard.sh @@ -22,6 +22,7 @@ curl -s -X PUT -d true "$SERVER/admin/settings/:GeoconnectCreateEditMaps" curl -s -X PUT -d true "$SERVER/admin/settings/:GeoconnectViewMaps" echo "- Setting system email" curl -X PUT -d "Dataverse Support " http://localhost:8080/api/admin/settings/:SystemEmail +curl -X PUT -d ", 
The President & Fellows of Harvard College" http://localhost:8080/api/admin/settings/:FooterCopyright echo "- Setting up the Harvard Shibboleth institutional group" curl -s -X POST -H 'Content-type:application/json' --upload-file data/shibGroupHarvard.json "$SERVER/admin/groups/shib?key=$adminKey" echo diff --git a/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql b/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql new file mode 100644 index 00000000000..6296fca8a5f --- /dev/null +++ b/scripts/database/upgrades/upgrade_v4.5_to_v4.5.1.sql @@ -0,0 +1 @@ +ALTER TABLE authenticateduser ADD COLUMN emailconfirmed timestamp without time zone; diff --git a/scripts/deploy/apitest.dataverse.org/deploy b/scripts/deploy/apitest.dataverse.org/deploy deleted file mode 100755 index bf27864daaf..00000000000 --- a/scripts/deploy/apitest.dataverse.org/deploy +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh -scripts/deploy/apitest.dataverse.org/prep -sudo /home/jenkins/dataverse/scripts/deploy/apitest.dataverse.org/rebuild -scripts/deploy/apitest.dataverse.org/post diff --git a/scripts/deploy/apitest.dataverse.org/dv-root.json b/scripts/deploy/apitest.dataverse.org/dv-root.json deleted file mode 100644 index 8d0b4ecab19..00000000000 --- a/scripts/deploy/apitest.dataverse.org/dv-root.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "alias": "root", - "name": "API Test", - "permissionRoot": false, - "facetRoot": true, - "description": "Welcome! This is a playground for Dataverse API users. (Data will be deleted periodically.) Please see http://guides.dataverse.org/en/latest/api to get started and http://community.dataverse.org/community-groups/api.html to join the community!", - "dataverseSubjects": [ - "Other" - ], - "dataverseContacts": [ - { - "contactEmail": "root@mailinator.com" -} - ] -} diff --git a/scripts/deploy/apitest.dataverse.org/post b/scripts/deploy/apitest.dataverse.org/post deleted file mode 100755 index 9b29be3a408..00000000000 --- a/scripts/deploy/apitest.dataverse.org/post +++ /dev/null @@ -1,13 +0,0 @@ -#/bin/sh -cd scripts/api -./setup-all.sh | tee /tmp/setup-all.sh.out -cd ../.. 
-psql -U dvnapp dvndb -f scripts/database/reference_data.sql -scripts/search/tests/publish-dataverse-root -git checkout scripts/api/data/dv-root.json -scripts/search/tests/grant-authusers-add-on-root -scripts/search/populate-users -scripts/search/create-users -scripts/search/tests/create-all-and-test -scripts/search/tests/publish-spruce1-and-test -java -jar downloads/schemaSpy_5.0.0.jar -t pgsql -host localhost -db dvndb -u postgres -p secret -s public -dp scripts/installer/pgdriver/postgresql-9.1-902.jdbc4.jar -o /usr/local/glassfish4/glassfish/domains/domain1/docroot/guides/developers/database/schemaspy diff --git a/scripts/deploy/apitest.dataverse.org/prep b/scripts/deploy/apitest.dataverse.org/prep deleted file mode 100755 index b7c181357b2..00000000000 --- a/scripts/deploy/apitest.dataverse.org/prep +++ /dev/null @@ -1,2 +0,0 @@ -#/bin/bash -x -cp scripts/deploy/apitest.dataverse.org/dv-root.json scripts/api/data/dv-root.json diff --git a/scripts/deploy/apitest.dataverse.org/rebuild b/scripts/deploy/apitest.dataverse.org/rebuild deleted file mode 100755 index aca38a56ab4..00000000000 --- a/scripts/deploy/apitest.dataverse.org/rebuild +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/sh -/usr/local/glassfish4/glassfish/bin/asadmin undeploy dataverse-4.0 -/usr/local/glassfish4/glassfish/bin/asadmin stop-domain -rm -rf /usr/local/glassfish4/glassfish/domains/domain1/files -psql -U dvnapp -c 'DROP DATABASE "dvndb"' template1 -echo $? -curl http://localhost:8983/solr/update/json?commit=true -H "Content-type: application/json" -X POST -d "{\"delete\": { \"query\":\"*:*\"}}" -psql -U dvnapp -c 'CREATE DATABASE "dvndb" WITH OWNER = "dvnapp"' template1 -echo $? -/usr/local/glassfish4/glassfish/bin/asadmin start-domain -/usr/local/glassfish4/glassfish/bin/asadmin deploy /tmp/dataverse-4.0.war diff --git a/scripts/deploy/apitest.dataverse.org/cert.md b/scripts/deploy/phoenix.dataverse.org/cert.md similarity index 67% rename from scripts/deploy/apitest.dataverse.org/cert.md rename to scripts/deploy/phoenix.dataverse.org/cert.md index 3b8084825d5..d68910fa15c 100644 --- a/scripts/deploy/apitest.dataverse.org/cert.md +++ b/scripts/deploy/phoenix.dataverse.org/cert.md @@ -1,13 +1,13 @@ Note that `-sha256` is used but the important thing is making sure SHA-1 is not selected when uploading the CSR to https://cert-manager.com/customer/InCommon - openssl genrsa -out apitest.dataverse.org.key 2048 + openssl genrsa -out phoenix.dataverse.org.key 2048 - openssl req -new -sha256 -key apitest.dataverse.org.key -out apitest.dataverse.org.csr + openssl req -new -sha256 -key phoenix.dataverse.org.key -out phoenix.dataverse.org.csr Country Name (2 letter code) [XX]:US State or Province Name (full name) []:Massachusetts Locality Name (eg, city) [Default City]:Cambridge Organization Name (eg, company) [Default Company Ltd]:Harvard College Organizational Unit Name (eg, section) []:IQSS - Common Name (eg, your name or your server's hostname) []:apitest.dataverse.org + Common Name (eg, your name or your server's hostname) []:phoenix.dataverse.org Email Address []:support@dataverse.org diff --git a/scripts/deploy/phoenix.dataverse.org/rebuild b/scripts/deploy/phoenix.dataverse.org/rebuild index b59a46f4466..ca92ef59b9e 100755 --- a/scripts/deploy/phoenix.dataverse.org/rebuild +++ b/scripts/deploy/phoenix.dataverse.org/rebuild @@ -5,6 +5,8 @@ OLD_WAR=$(echo $LIST_APP | awk '{print $1}') NEW_WAR=/tmp/dataverse.war /usr/local/glassfish4/glassfish/bin/asadmin undeploy $OLD_WAR /usr/local/glassfish4/glassfish/bin/asadmin stop-domain 
+# blow away "generated" directory to avoid "EJB Timer Service is not available" https://github.com/IQSS/dataverse/issues/3336 +rm -rf /usr/local/glassfish4/glassfish/domains/domain1/generated rm -rf /usr/local/glassfish4/glassfish/domains/domain1/files #psql -U postgres -c "CREATE ROLE dvnapp UNENCRYPTED PASSWORD 'secret' SUPERUSER CREATEDB CREATEROLE INHERIT LOGIN" template1 psql -U dvnapp -c 'DROP DATABASE "dvndb"' template1 diff --git a/scripts/issues/1380/01-add.localhost.sh b/scripts/issues/1380/01-add.localhost.sh new file mode 100755 index 00000000000..331011d5fa2 --- /dev/null +++ b/scripts/issues/1380/01-add.localhost.sh @@ -0,0 +1,2 @@ +# Add the localhost group to the system. +curl -X POST -H"Content-Type:application/json" -d@../../api/data/ipGroup-localhost.json localhost:8080/api/admin/groups/ip diff --git a/scripts/issues/1380/02-build-dv-structure.sh b/scripts/issues/1380/02-build-dv-structure.sh new file mode 100755 index 00000000000..f0936e3cf69 --- /dev/null +++ b/scripts/issues/1380/02-build-dv-structure.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +echo Run this after running setup-users.sh, and making Pete an +echo admin on the root dataverse. + + +PETE=$(grep :result: users.out | grep Pete | cut -f4 -d: | tr -d \ ) +UMA=$(grep :result: users.out | grep Uma | cut -f4 -d: | tr -d \ ) + +pushd ../../api +./setup-dvs.sh $PETE $UMA +popd diff --git a/scripts/issues/1380/add-ip-group.sh b/scripts/issues/1380/add-ip-group.sh new file mode 100755 index 00000000000..2fba944807c --- /dev/null +++ b/scripts/issues/1380/add-ip-group.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +# Add the passed group to the system. +curl -X POST -H"Content-Type:application/json" -d@../../api/data/$1 localhost:8080/api/admin/groups/ip diff --git a/scripts/issues/1380/add-user b/scripts/issues/1380/add-user new file mode 100755 index 00000000000..1781181bb79 --- /dev/null +++ b/scripts/issues/1380/add-user @@ -0,0 +1,3 @@ +#!/bin/bash +# add-user dv group user api-token
curl -H "Content-type:application/json" -X POST -d"[$3]" localhost:8080/api/dataverses/$1/groups/$2/roleAssignees?key=$4 diff --git a/scripts/issues/1380/data/3-eg1.json b/scripts/issues/1380/data/3-eg1.json new file mode 100644 index 00000000000..a874d69a2e8 --- /dev/null +++ b/scripts/issues/1380/data/3-eg1.json @@ -0,0 +1 @@ +["&explicit/3-eg1"] diff --git a/scripts/issues/1380/data/guest.json b/scripts/issues/1380/data/guest.json new file mode 100644 index 00000000000..3e4188a7167 --- /dev/null +++ b/scripts/issues/1380/data/guest.json @@ -0,0 +1 @@ +[":guest"] diff --git a/scripts/issues/1380/data/locals.json b/scripts/issues/1380/data/locals.json new file mode 100644 index 00000000000..8bb5e3e4162 --- /dev/null +++ b/scripts/issues/1380/data/locals.json @@ -0,0 +1 @@ +["&ip/localhost"] diff --git a/scripts/issues/1380/data/pete.json b/scripts/issues/1380/data/pete.json new file mode 100644 index 00000000000..298e813d2bc --- /dev/null +++ b/scripts/issues/1380/data/pete.json @@ -0,0 +1 @@ +["@pete"] diff --git a/scripts/issues/1380/data/uma.json b/scripts/issues/1380/data/uma.json new file mode 100644 index 00000000000..3caf8c5c9cc --- /dev/null +++ b/scripts/issues/1380/data/uma.json @@ -0,0 +1 @@ +["@uma"] diff --git a/scripts/issues/1380/db-list-dvs b/scripts/issues/1380/db-list-dvs new file mode 100755 index 00000000000..4161f7fdd03 --- /dev/null +++ b/scripts/issues/1380/db-list-dvs @@ -0,0 +1 @@ +psql dvndb -c "select dvobject.id, name, alias, owner_id from dvobject inner join dataverse on dvobject.id = dataverse.id" diff --git
a/scripts/issues/1380/delete-ip-group b/scripts/issues/1380/delete-ip-group new file mode 100755 index 00000000000..b6138d95024 --- /dev/null +++ b/scripts/issues/1380/delete-ip-group @@ -0,0 +1,9 @@ +#!/bin/bash +if [ $# -eq 0 ] + then + echo "Please provide IP group id" + echo "e.g. $0 845" + exit 1 +fi + +curl -X DELETE http://localhost:8080/api/admin/groups/ip/$1 diff --git a/scripts/issues/1380/dvs.gv b/scripts/issues/1380/dvs.gv new file mode 100644 index 00000000000..526066000a2 --- /dev/null +++ b/scripts/issues/1380/dvs.gv @@ -0,0 +1,19 @@ +digraph { +d1[label="Root"] +d2[label="Top dataverse of Pete"] +d3[label="Pete's public place"] +d4[label="Pete's restricted data"] +d5[label="Pete's secrets"] +d6[label="Top dataverse of Uma"] +d7[label="Uma's first"] +d8[label="Uma's restricted"] + +d1 -> d2 +d2 -> d3 +d2 -> d4 +d2 -> d5 +d1 -> d6 +d6 -> d7 +d6 -> d8 + +} diff --git a/scripts/issues/1380/dvs.pdf b/scripts/issues/1380/dvs.pdf new file mode 100644 index 00000000000..5169f449420 Binary files /dev/null and b/scripts/issues/1380/dvs.pdf differ diff --git a/scripts/issues/1380/explicitGroup1.json b/scripts/issues/1380/explicitGroup1.json new file mode 100644 index 00000000000..337a0b62dcb --- /dev/null +++ b/scripts/issues/1380/explicitGroup1.json @@ -0,0 +1,5 @@ +{ + "description":"Sample Explicit Group", + "displayName":"Close Collaborators", + "aliasInOwner":"eg1" +} diff --git a/scripts/issues/1380/explicitGroup2.json b/scripts/issues/1380/explicitGroup2.json new file mode 100644 index 00000000000..fbac263665c --- /dev/null +++ b/scripts/issues/1380/explicitGroup2.json @@ -0,0 +1,5 @@ +{ + "description":"Sample Explicit Group", + "displayName":"Not-So-Close Collaborators", + "aliasInOwner":"eg2" +} diff --git a/scripts/issues/1380/keys.txt b/scripts/issues/1380/keys.txt new file mode 100644 index 00000000000..9dc47d356c1 --- /dev/null +++ b/scripts/issues/1380/keys.txt @@ -0,0 +1,3 @@ +Keys for P e t e and U m a. Produced by running setup-all.sh from the /scripts/api folder. +Pete:757a6493-456a-4bf0-943e-9b559d551a3f +Uma:8797f19b-b8aa-4f96-a789-1b99506f2eab diff --git a/scripts/issues/1380/list-groups-for b/scripts/issues/1380/list-groups-for new file mode 100755 index 00000000000..063b92c9b6a --- /dev/null +++ b/scripts/issues/1380/list-groups-for @@ -0,0 +1,2 @@ +#!/bin/bash +curl -s -X GET http://localhost:8080/api/test/explicitGroups/$1 | jq . diff --git a/scripts/issues/1380/list-ip-groups.sh b/scripts/issues/1380/list-ip-groups.sh new file mode 100755 index 00000000000..fba29cced4e --- /dev/null +++ b/scripts/issues/1380/list-ip-groups.sh @@ -0,0 +1,2 @@ +#!/bin/bash +curl -X GET http://localhost:8080/api/admin/groups/ip | jq .
diff --git a/scripts/issues/1380/truth-table.numbers b/scripts/issues/1380/truth-table.numbers new file mode 100644 index 00000000000..86f67386fbb Binary files /dev/null and b/scripts/issues/1380/truth-table.numbers differ diff --git a/scripts/issues/1380/users.out b/scripts/issues/1380/users.out new file mode 100644 index 00000000000..337b9e2ce01 --- /dev/null +++ b/scripts/issues/1380/users.out @@ -0,0 +1,6 @@ +{"status":"OK","data":{"user":{"id":4,"firstName":"Gabbi","lastName":"Guest","userName":"gabbi","affiliation":"low","position":"A Guest","email":"gabbi@malinator.com"},"authenticatedUser":{"id":4,"identifier":"@gabbi","displayName":"Gabbi Guest","firstName":"Gabbi","lastName":"Guest","email":"gabbi@malinator.com","superuser":false,"affiliation":"low","position":"A Guest","persistentUserId":"gabbi","authenticationProviderId":"builtin"},"apiToken":"d1940786-c315-491e-9812-a8ff809289cc"}} +{"status":"OK","data":{"user":{"id":5,"firstName":"Cathy","lastName":"Collaborator","userName":"cathy","affiliation":"mid","position":"Data Scientist","email":"cathy@malinator.com"},"authenticatedUser":{"id":5,"identifier":"@cathy","displayName":"Cathy Collaborator","firstName":"Cathy","lastName":"Collaborator","email":"cathy@malinator.com","superuser":false,"affiliation":"mid","position":"Data Scientist","persistentUserId":"cathy","authenticationProviderId":"builtin"},"apiToken":"0ddfcb1e-fb51-4ce7-88ab-308b23e13e9a"}} +{"status":"OK","data":{"user":{"id":6,"firstName":"Nick","lastName":"NSA","userName":"nick","affiliation":"gov","position":"Signals Intelligence","email":"nick@malinator.com"},"authenticatedUser":{"id":6,"identifier":"@nick","displayName":"Nick NSA","firstName":"Nick","lastName":"NSA","email":"nick@malinator.com","superuser":false,"affiliation":"gov","position":"Signals Intelligence","persistentUserId":"nick","authenticationProviderId":"builtin"},"apiToken":"6d74745d-1733-459a-ae29-422110056ec0"}} +reporting API keys +:result: Pete's key is: 757a6493-456a-4bf0-943e-9b559d551a3f +:result: Uma's key is: 8797f19b-b8aa-4f96-a789-1b99506f2eab \ No newline at end of file diff --git a/scripts/issues/2438/download.R b/scripts/issues/2438/download.R index 2d31ed0865b..eea7f185137 100644 --- a/scripts/issues/2438/download.R +++ b/scripts/issues/2438/download.R @@ -13,7 +13,7 @@ download.dataverse.file <- function(url) { # look up the id of the file. As of this writing the easiest way is via SWORD: # https://github.com/IQSS/dataverse/issues/1837#issuecomment-121736332 # - # url.to.download = 'https://apitest.dataverse.org/api/v1/access/datafile/91' + # url.to.download = 'https://demo.dataverse.org/api/v1/access/datafile/91' url.to.download = url tsvfile = 'file.tsv' download.file(url = url.to.download, destfile = @@ -23,4 +23,4 @@ download.dataverse.file <- function(url) { unlink(tsvfile) } -download.dataverse.file(arg) \ No newline at end of file +download.dataverse.file(arg) diff --git a/scripts/search/tests/ipgroup-add b/scripts/search/tests/ipgroup-add index 8033b277258..d41679fd188 100755 --- a/scripts/search/tests/ipgroup-add +++ b/scripts/search/tests/ipgroup-add @@ -1,5 +1,5 @@ #!/bin/sh . scripts/search/export-keys -OUTPUT=`curl -s -X POST -d @scripts/api/data/ipGroup3.json http://localhost:8080/api/admin/groups/ip -H "Content-type:application/json"` +OUTPUT=`curl -s -X POST -d @scripts/api/data/ipGroup-all.json http://localhost:8080/api/admin/groups/ip -H "Content-type:application/json"` echo $OUTPUT echo $OUTPUT | jq . 
diff --git a/src/main/java/Bundle.properties b/src/main/java/Bundle.properties index bc39855adc6..ca19a51533e 100755 --- a/src/main/java/Bundle.properties +++ b/src/main/java/Bundle.properties @@ -1,8 +1,10 @@ dataverse=Dataverse newDataverse=New Dataverse hostDataverse=Host Dataverse +dataverses=Dataverses passwd=Password dataset=Dataset +datasets=Datasets newDataset=New Dataset files=Files file=File @@ -90,7 +92,7 @@ footer.dataverseOnGitHub=Dataverse On GitHub footer.dataverseProjectOn=Dataverse Project on footer.Twitter=Twitter footer.dataScienceIQSS=Developed at the Institute for Quantitative Social Science -footer.copyright=Copyright © 2016, The President & Fellows of Harvard College +footer.copyright=Copyright © {0} footer.widget.datastored=Data is stored at {0}. footer.widget.login=Log in to footer.privacyPolicy=Privacy Policy @@ -138,7 +140,8 @@ wasPublished=, was published in wasReturnedByReviewer=, was returned by the curator of toReview=Don't forget to publish it or send it back to the contributor! worldMap.added=dataset had a WorldMap layer data added to it. -notification.welcome=Welcome to {0} Dataverse! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. +# Bundle file editors, please note that "notification.welcome" is used in a unit test. +notification.welcome=Welcome to {0} Dataverse! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address. notification.demoSite=Demo Site notification.requestFileAccess=File access requested for dataset: {0}. notification.grantFileAccess=Access granted for files in dataset: {0}. @@ -204,6 +207,16 @@ login.builtin.invalidUsernameEmailOrPassword=The username, email address, or pas # how do we exercise login.error? Via a password upgrade failure? See https://github.com/IQSS/dataverse/pull/2922 login.error=Error validating the username, email address, or password. Please try again. If the problem persists, contact an administrator. +#confirmemail.xhtml +confirmEmail.pageTitle=Email Verification +confirmEmail.submitRequest=Verify Email +confirmEmail.submitRequest.success=A verification email has been sent to {0}. Note, the verify link will expire after {1}. +confirmEmail.details.success=Email address verified! +confirmEmail.details.failure=We were unable to verify your email address. Please navigate to your Account Information page and click the "Verify Email" button. +confirmEmail.details.goToAccountPageButton=Go to Account Information +confirmEmail.notVerified=Not Verified +confirmEmail.verified=Verified + #shib.xhtml shib.btn.convertAccount=Convert Account shib.btn.createAccount=Create Account @@ -420,9 +433,12 @@ notification.email.returned.dataset.subject=Dataverse: Your dataset has been ret notification.email.create.account.subject=Dataverse: Your account has been created notification.email.assign.role.subject=Dataverse: You have been assigned a role notification.email.revoke.role.subject=Dataverse: Your role has been revoked +notification.email.verifyEmail.subject=Dataverse: Verify your email address notification.email.greeting=Hello, \n +# Bundle file editors, please note that "notification.email.welcome" is used in a unit test notification.email.welcome=Welcome to Dataverse! Get started by adding or finding data. Have questions? Check out the User Guide at {0}/{1}/user/ or contact Dataverse Support for assistance. 
Want to test out Dataverse features? Use our Demo Site at https://demo.dataverse.org +notification.email.welcomeConfirmEmailAddOn=\n\nPlease verify your email address at {0}. Note, the verify link will expire after {1}. Send another verification email by visiting your account page. notification.email.requestFileAccess=File access requested for dataset: {0}. Manage permissions at {1}. notification.email.grantFileAccess=Access granted for files in dataset: {0} (view at {1}). notification.email.rejectFileAccess=Access rejected for requested files in dataset: {0} (view at {1}). @@ -437,6 +453,11 @@ notification.email.worldMap.added={0} (view at {1}) had WorldMap layer data adde notification.email.closing=\n\nThank you,\nThe Dataverse Project notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}). notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}). +notification.email.changeEmail=Hello, {0}.{1}\n\nPlease contact us if you did not intend this change or if you need assistance. +hours=hours +hour=hour +minutes=minutes +minute=minute # passwordreset.xhtml @@ -717,11 +738,11 @@ dataverse.permissions.roles.copy=Copy Role # permissions-manage-files.xhtml -dataverse.permissionsFiles.title=File Permissions +dataverse.permissionsFiles.title=Restricted File Permissions dataverse.permissionsFiles.usersOrGroups=Users/Groups dataverse.permissionsFiles.usersOrGroups.assignBtn=Grant Access to Users/Groups -dataverse.permissionsFiles.usersOrGroups.description=All the users and groups that have access to files in this dataset. +dataverse.permissionsFiles.usersOrGroups.description=All the users and groups that have access to restricted files in this dataset. dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation) dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID dataverse.permissionsFiles.usersOrGroups.tabHeader.email=Email @@ -731,7 +752,8 @@ dataverse.permissionsFiles.usersOrGroups.file=File dataverse.permissionsFiles.usersOrGroups.files=Files dataverse.permissionsFiles.usersOrGroups.invalidMsg=There are no users or groups with access to the restricted files in this dataset. -dataverse.permissionsFiles.files=Files +dataverse.permissionsFiles.files=Restricted Files +dataverse.permissionsFiles.files.label={0, choice, 0#Restricted Files|1#Restricted File|2#Restricted Files} dataverse.permissionsFiles.files.description=All the restricted files in this dataset. dataverse.permissionsFiles.files.tabHeader.fileName=File Name dataverse.permissionsFiles.files.tabHeader.roleAssignees=Users/Groups @@ -743,8 +765,11 @@ dataverse.permissionsFiles.files.public=Public dataverse.permissionsFiles.files.restricted=Restricted dataverse.permissionsFiles.files.roleAssignee=User/Group dataverse.permissionsFiles.files.roleAssignees=Users/Groups +dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Users/Groups|1#User/Group|2#Users/Groups} dataverse.permissionsFiles.files.assignBtn=Assign Access dataverse.permissionsFiles.files.invalidMsg=There are no restricted files in this dataset. 
+dataverse.permissionsFiles.files.requested=Requested Files +dataverse.permissionsFiles.files.selected=Selecting {0} of {1} {2} dataverse.permissionsFiles.viewRemoveDialog.header=File Access dataverse.permissionsFiles.viewRemoveDialog.removeBtn=Remove Access @@ -752,12 +777,11 @@ dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=Are you sure dataverse.permissionsFiles.assignDialog.header=Grant File Access dataverse.permissionsFiles.assignDialog.description=Grant file access to users and groups. -dataverse.permissionsFiles.assignDialog.userOrGroup=User/Group -dataverse.permissionsFiles.assignDialog.userOrGroup.title=User/Group +dataverse.permissionsFiles.assignDialog.userOrGroup=Users/Groups dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=Enter User/Group Name dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=No matches found. dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group. -dataverse.permissionsFiles.assignDialog.file=File +dataverse.permissionsFiles.assignDialog.fileName=File Name dataverse.permissionsFiles.assignDialog.grantBtn=Grant dataverse.permissionsFiles.assignDialog.rejectBtn=Reject @@ -779,11 +803,12 @@ dataverse.permissions.Q2.answer.curator.description=- Edit metadata, upload file dataverse.permissions.usersOrGroups.assignDialog.header=Assign Role dataverse.permissions.usersOrGroups.assignDialog.description=Grant permissions to users and groups by assigning them a role. -dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=User/Group +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Users/Groups dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Enter User/Group Name dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=No matches found. dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group. dataverse.permissions.usersOrGroups.assignDialog.role.description=These are the permissions associated with the selected role. +dataverse.permissions.usersOrGroups.assignDialog.role.warning=Assigning the {0} role means the user(s) will also have the {0} role applied to all {1} within this {2}. dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Please select a role to assign. # roles-edit.xhtml @@ -804,6 +829,7 @@ dataverse.permissions.explicitGroupEditDialog.title.new=Create Group dataverse.permissions.explicitGroupEditDialog.title.edit=Edit Group {0} dataverse.permissions.explicitGroupEditDialog.help=Add users or other groups to this group. dataverse.permissions.explicitGroupEditDialog.groupIdentifier=Group Identifier +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip=Short name used for the ID of this group. 
dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=Group identifier cannot be empty dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=Group identifier can contain only letters, digits, underscores (_) and dashes (-) dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=Consists of letters, digits, underscores (_) and dashes (-) @@ -978,6 +1004,8 @@ dataset.editBtn.itemLabel.terms=Terms dataset.editBtn.itemLabel.permissions=Permissions dataset.editBtn.itemLabel.widgets=Widgets dataset.editBtn.itemLabel.privateUrl=Private URL +dataset.editBtn.itemLabel.permissionsDataset=Dataset +dataset.editBtn.itemLabel.permissionsFile=Restricted Files dataset.editBtn.itemLabel.deleteDataset=Delete Dataset dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version dataset.editBtn.itemLabel.deaccession=Deaccession Dataset @@ -1125,10 +1153,11 @@ file.noSelectedFiles.tip=There are no selected files to display. file.noUploadedFiles.tip=Files you upload will appear here. file.delete=Delete file.metadata=Metadata -file.deleted.success=Files {0} will be permanently deleted from this version of this dataset once you click on the Save Changes button. +file.deleted.success=Files "{0}" will be permanently deleted from this version of this dataset once you click on the Save Changes button. +file.editAccess=Edit Access file.restrict=Restrict file.unrestrict=Unrestrict -file.restricted.success=The file(s) {0} will be restricted after you click on the Save Changes button on the bottom of this page. +file.restricted.success=Files "{0}" will be restricted once you click on the Save Changes button. file.download.header=Download file.preview=Preview: file.fileName=File Name diff --git a/src/main/java/edu/harvard/iq/dataverse/BibtexCitation.java b/src/main/java/edu/harvard/iq/dataverse/BibtexCitation.java index 742c86c70b8..0a6a930a94e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/BibtexCitation.java +++ b/src/main/java/edu/harvard/iq/dataverse/BibtexCitation.java @@ -71,7 +71,7 @@ public String getPublisher() { public String toString() { StringBuilder citation = new StringBuilder("@data{"); citation.append(persistentId.getIdentifier() + "_" + year + "," + "\r\n"); - citation.append("author = {").append(String.join("; ", authors)).append("},\r\n"); + citation.append("author = {").append(String.join(" and ", authors)).append("},\r\n"); citation.append("publisher = {").append(publisher).append("},\r\n"); citation.append("title = {").append(title).append("},\r\n"); citation.append("year = {").append(year).append("},\r\n"); diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 24c01e3b107..057faf4211e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -16,6 +16,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.Files; +import java.util.Comparator; import javax.persistence.Entity; import javax.persistence.OneToMany; import javax.persistence.OneToOne; @@ -44,7 +45,7 @@ , @Index(columnList="md5") , @Index(columnList="contenttype") , @Index(columnList="restricted")}) -public class DataFile extends DvObject { +public class DataFile extends DvObject implements Comparable { private static final long serialVersionUID = 1L; public static final char INGEST_STATUS_NONE = 65; @@ -228,6 +229,11 @@ public String getOriginalFileFormat() { return null; } + @Override + public boolean isAncestorOf( DvObject 
other ) { + return equals(other); + } + /* * A user-friendly version of the "original format": */ @@ -604,6 +610,13 @@ public String getDisplayName() { return getLatestFileMetadata().getLabel(); } + @Override + public int compareTo(Object o) { + DataFile other = (DataFile) o; + return this.getDisplayName().toUpperCase().compareTo(other.getDisplayName().toUpperCase()); + + } + /** * Check if the Geospatial Tag has been assigned to this file * @return @@ -619,4 +632,4 @@ public boolean hasGeospatialTag(){ } return false; } -} +} \ No newline at end of file diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 0acf76dc00e..39a953d0de7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -654,5 +654,9 @@ public String getDisplayName() { protected boolean isPermissionRoot() { return false; } - + + @Override + public boolean isAncestorOf( DvObject other ) { + return equals(other) || equals(other.getOwner()); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 882608dff45..55811ac8b1a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -83,7 +83,6 @@ import javax.faces.event.AjaxBehaviorEvent; -import javax.faces.context.ExternalContext; import org.apache.commons.lang.StringEscapeUtils; import org.primefaces.component.tabview.TabView; @@ -187,7 +186,6 @@ public enum DisplayMode { private List