From 6ba2a1f4e3d282575325db160ed07282cbb734b0 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 13 May 2022 11:05:08 +0000 Subject: [PATCH 001/354] dataverse.no --- .../dataverse.no/configs/backup-http-ssl.conf | 294 ++++ .../dataverse.no/configs/backup.http-ssl.conf | 287 +++ distros/dataverse.no/configs/domain.xml | 649 +++++++ .../configs/htdocs-ssl/index.html | 1 + .../dataverse.no/configs/htdocs/index.html | 1 + .../dataverse.no/configs/http-ssl-test.conf | 322 ++++ distros/dataverse.no/configs/http-ssl.conf | 349 ++++ distros/dataverse.no/configs/httpd.conf | 551 ++++++ distros/dataverse.no/configs/schema.xml | 1546 +++++++++++++++++ distros/dataverse.no/docker-compose.yaml | 282 +++ .../dataverse.no/init.d/006-s3-aws-storage.sh | 19 + .../dataverse.no/init.d/007-s3-aws-storage.sh | 19 + .../dataverse.no/init.d/01-persistent-id.sh | 22 + .../dataverse.no/init.d/010-mailrelay-set.sh | 9 + .../dataverse.no/init.d/011-local-storage.sh | 6 + .../dataverse.no/init.d/012-minio-bucket1.sh | 17 + .../dataverse.no/init.d/021-jhove-set-link.sh | 3 + distros/dataverse.no/init.d/022-splitpath.sh | 3 + distros/dataverse.no/init.d/03-doi-set.sh | 15 + distros/dataverse.no/init.d/04-setdomain.sh | 10 + distros/dataverse.no/init.d/07-previewers.sh | 38 + .../dataverse.no/init.d/08-federated-login.sh | 7 + distros/dataverse.no/init.d/105-reindex.sh | 3 + distros/dataverse.no/init.d/cors.json | 10 + 24 files changed, 4463 insertions(+) create mode 100755 distros/dataverse.no/configs/backup-http-ssl.conf create mode 100755 distros/dataverse.no/configs/backup.http-ssl.conf create mode 100644 distros/dataverse.no/configs/domain.xml create mode 100755 distros/dataverse.no/configs/htdocs-ssl/index.html create mode 100755 distros/dataverse.no/configs/htdocs/index.html create mode 100755 distros/dataverse.no/configs/http-ssl-test.conf create mode 100755 distros/dataverse.no/configs/http-ssl.conf create mode 100755 distros/dataverse.no/configs/httpd.conf create mode 100644 distros/dataverse.no/configs/schema.xml create mode 100755 distros/dataverse.no/docker-compose.yaml create mode 100755 distros/dataverse.no/init.d/006-s3-aws-storage.sh create mode 100755 distros/dataverse.no/init.d/007-s3-aws-storage.sh create mode 100755 distros/dataverse.no/init.d/01-persistent-id.sh create mode 100755 distros/dataverse.no/init.d/010-mailrelay-set.sh create mode 100755 distros/dataverse.no/init.d/011-local-storage.sh create mode 100755 distros/dataverse.no/init.d/012-minio-bucket1.sh create mode 100755 distros/dataverse.no/init.d/021-jhove-set-link.sh create mode 100755 distros/dataverse.no/init.d/022-splitpath.sh create mode 100755 distros/dataverse.no/init.d/03-doi-set.sh create mode 100755 distros/dataverse.no/init.d/04-setdomain.sh create mode 100755 distros/dataverse.no/init.d/07-previewers.sh create mode 100755 distros/dataverse.no/init.d/08-federated-login.sh create mode 100755 distros/dataverse.no/init.d/105-reindex.sh create mode 100644 distros/dataverse.no/init.d/cors.json diff --git a/distros/dataverse.no/configs/backup-http-ssl.conf b/distros/dataverse.no/configs/backup-http-ssl.conf new file mode 100755 index 0000000..19247ee --- /dev/null +++ b/distros/dataverse.no/configs/backup-http-ssl.conf @@ -0,0 +1,294 @@ +# +# When we also provide SSL we have to listen to the +# the HTTPS port in addition. +# +Listen 443 https + +## +## SSL Global Context +## +## All SSL configuration in this context applies both to +## the main server and all SSL-enabled virtual hosts. 
+## + +# Pass Phrase Dialog: +# Configure the pass phrase gathering process. +# The filtering dialog program (`builtin' is a internal +# terminal dialog) has to provide the pass phrase on stdout. +SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog + +# Inter-Process Session Cache: +# Configure the SSL Session Cache: First the mechanism +# to use and second the expiring timeout (in seconds). +SSLSessionCache shmcb:/run/httpd/sslcache(512000) +SSLSessionCacheTimeout 300 + +# Pseudo Random Number Generator (PRNG): +# Configure one or more sources to seed the PRNG of the +# SSL library. The seed data should be of good random quality. +# WARNING! On some platforms /dev/random blocks if not enough entropy +# is available. This means you then cannot use the /dev/random device +# because it would lead to very long connection times (as long as +# it requires to make more entropy available). But usually those +# platforms additionally provide a /dev/urandom device which doesn't +# block. So, if available, use this one instead. Read the mod_ssl User +# Manual for more details. +SSLRandomSeed startup file:/dev/urandom 256 +SSLRandomSeed connect builtin +#SSLRandomSeed startup file:/dev/random 512 +#SSLRandomSeed connect file:/dev/random 512 +#SSLRandomSeed connect file:/dev/urandom 512 + +# +# Use "SSLCryptoDevice" to enable any supported hardware +# accelerators. Use "openssl engine -v" to list supported +# engine names. NOTE: If you enable an accelerator and the +# server does not start, consult the error logs and ensure +# your accelerator is functioning properly. +# +SSLCryptoDevice builtin +#SSLCryptoDevice ubsec + +## +## SSL Virtual Host Context +## +# + + ServerName test-docker.dataverse.no + + SSLProxyEngine on + ProxyPass / https://test-docker.dataverse.no:443/ + ProxyPassReverse / https://test-docker.dataverse.no:443/ + + + + +# General setup for the virtual host, inherited from global configuration +#DocumentRoot "/var/www/html" +#ServerName www.example.com:443 +ServerName test-docker.dataverse.no +Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains" +# Content-Security-Policy: noen java-filer laster fra http, så denne kan +# ikke brukes. +#Header always set Content-Security-Policy "default-src https:" +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +#:443 + +# Use separate log files for the SSL virtual host; note that LogLevel +# is not inherited from httpd.conf. +ErrorLog /dev/stdout +TransferLog /dev/stdout +LoadModule dumpio_module modules/mod_dumpio.so + + DumpIOInput On + DumpIOOutput On + +LogLevel dumpio:trace7 + +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None" + +# SSL Engine Switch: +# Enable/Disable SSL for this virtual host. +SSLEngine on + +# SSL Protocol support: +# List the enable protocol levels with which clients will be able to +# connect. Disable SSLv2 access by default: +SSLProtocol all -SSLv2 -SSLv3 + +# SSL Cipher Suite: +# List the ciphers that the client is permitted to negotiate. +# See the mod_ssl documentation for a complete list. 
+SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA + +# Speed-optimized SSL Cipher configuration: +# If speed is your main concern (on busy HTTPS servers e.g.), +# you might want to force clients to specific, performance +# optimized ciphers. In this case, prepend those ciphers +# to the SSLCipherSuite list, and enable SSLHonorCipherOrder. +# Caveat: by giving precedence to RC4-SHA and AES128-SHA +# (as in the example below), most connections will no longer +# have perfect forward secrecy - if the server's key is +# compromised, captures of past or future traffic must be +# considered compromised, too. +#SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5 +#SSLHonorCipherOrder on + +# Server Certificate: +# Point SSLCertificateFile at a PEM encoded certificate. If +# the certificate is encrypted, then you will be prompted for a +# pass phrase. Note that a kill -HUP will prompt again. A new +# certificate can be generated using the genkey(1) command. +SSLCertificateFile /etc/pki/tls/certs/localhost.crt + +# Server Private Key: +# If the key is not combined with the certificate, use this +# directive to point at the key file. Keep in mind that if +# you've both a RSA and a DSA private key you can configure +# both in parallel (to also allow the use of DSA ciphers, etc.) +SSLCertificateKeyFile /etc/pki/tls/private/localhost.key + +# Server Certificate Chain: +# Point SSLCertificateChainFile at a file containing the +# concatenation of PEM encoded CA certificates which form the +# certificate chain for the server certificate. Alternatively +# the referenced file can be the same as SSLCertificateFile +# when the CA certificates are directly appended to the server +# certificate for convinience. +#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt + +# Certificate Authority (CA): +# Set the CA certificate verification path where to find CA +# certificates for client authentication or alternatively one +# huge file containing all of them (file must be PEM encoded) +#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt + +# Client Authentication (Type): +# Client certificate verification type and depth. Types are +# none, optional, require and optional_no_ca. Depth is a +# number which specifies how deeply to verify the certificate +# issuer chain before deciding the certificate is not valid. +#SSLVerifyClient require +#SSLVerifyDepth 10 + +LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so + +# +# Turn this on to support "require valid-user" rules from other +# mod_authn_* modules, and use "require shib-session" for anonymous +# session-based authorization in mod_shib. +# +ShibCompatValidUser Off + +# +# Ensures handler will be accessible. +# + + AuthType None + Require all granted + # vty + ShibRequestSetting requireSession 1 + require shib-session + + +# +# Used for example style sheet in error templates. +# + + + AuthType None + Require all granted + + Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css + + +# +# Configure the module for content. +# +# You MUST enable AuthType shibboleth for the module to process +# any requests, and there MUST be a require command as well. To +# enable Shibboleth but not specify any session/access requirements +# use "require shibboleth". 
+# + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require shib-session + +#ProxyPass / ajp://dataverse:8009 +#ProxyPassReverse / ajp://dataverse:8009 + +# Access Control: +# With SSLRequire you can do per-directory access control based +# on arbitrary complex boolean expressions containing server +# variable checks and other lookup directives. The syntax is a +# mixture between C and Perl. See the mod_ssl documentation +# for more details. +# +#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \ +# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \ +# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \ +# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \ +# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \ +# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/ +# + +# SSL Engine Options: +# Set various options for the SSL engine. +# o FakeBasicAuth: +# Translate the client X.509 into a Basic Authorisation. This means that +# the standard Auth/DBMAuth methods can be used for access control. The +# user name is the `one line' version of the client's X.509 certificate. +# Note that no password is obtained from the user. Every entry in the user +# file needs this password: `xxj31ZMTZzkVA'. +# o ExportCertData: +# This exports two additional environment variables: SSL_CLIENT_CERT and +# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the +# server (always existing) and the client (only existing when client +# authentication is used). This can be used to import the certificates +# into CGI scripts. +# o StdEnvVars: +# This exports the standard SSL/TLS related `SSL_*' environment variables. +# Per default this exportation is switched off for performance reasons, +# because the extraction step is an expensive operation and is usually +# useless for serving static content. So one usually enables the +# exportation for CGI and SSI requests only. +# o StrictRequire: +# This denies access when "SSLRequireSSL" or "SSLRequire" applied even +# under a "Satisfy any" situation, i.e. when it applies access is denied +# and no other module can change it. +# o OptRenegotiate: +# This enables optimized SSL connection renegotiation handling when SSL +# directives are used in per-directory context. +#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire + + SSLOptions +StdEnvVars + + + SSLOptions +StdEnvVars + + +# SSL Protocol Adjustments: +# The safe and default but still SSL/TLS standard compliant shutdown +# approach is that mod_ssl sends the close notify alert but doesn't wait for +# the close notify alert from client. When you need a different shutdown +# approach you can use one of the following variables: +# o ssl-unclean-shutdown: +# This forces an unclean shutdown when the connection is closed, i.e. no +# SSL close notify alert is send or allowed to received. This violates +# the SSL/TLS standard but is needed for some brain-dead browsers. Use +# this when you receive I/O errors because of the standard approach where +# mod_ssl sends the close notify alert. +# o ssl-accurate-shutdown: +# This forces an accurate shutdown when the connection is closed, i.e. a +# SSL close notify alert is send and mod_ssl waits for the close notify +# alert of the client. This is 100% SSL/TLS standard compliant, but in +# practice often causes hanging connections with brain-dead browsers. Use +# this only for browsers where you know that their SSL implementation +# works correctly. 
+# Notice: Most problems of broken clients are also related to the HTTP +# keep-alive facility, so you usually additionally want to disable +# keep-alive for those clients, too. Use variable "nokeepalive" for this. +# Similarly, one has to force some clients to use HTTP/1.0 to workaround +# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and +# "force-response-1.0" for this. +BrowserMatch "MSIE [2-5]" \ + nokeepalive ssl-unclean-shutdown \ + downgrade-1.0 force-response-1.0 + +# Per-Server Logging: +# The home of a custom SSL log file. Use this when you want a +# compact non-error SSL logfile on a virtual host basis. +#CustomLog /dev/stdout \ +# "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b" +Customlog /var/log/httpd/access.log combined +ErrorLog /var/log/httpd/error.log + +ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i" + + diff --git a/distros/dataverse.no/configs/backup.http-ssl.conf b/distros/dataverse.no/configs/backup.http-ssl.conf new file mode 100755 index 0000000..414970c --- /dev/null +++ b/distros/dataverse.no/configs/backup.http-ssl.conf @@ -0,0 +1,287 @@ +# +# When we also provide SSL we have to listen to the +# the HTTPS port in addition. +# +Listen 443 https + +## +## SSL Global Context +## +## All SSL configuration in this context applies both to +## the main server and all SSL-enabled virtual hosts. +## + +# Pass Phrase Dialog: +# Configure the pass phrase gathering process. +# The filtering dialog program (`builtin' is a internal +# terminal dialog) has to provide the pass phrase on stdout. +SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog + +# Inter-Process Session Cache: +# Configure the SSL Session Cache: First the mechanism +# to use and second the expiring timeout (in seconds). +SSLSessionCache shmcb:/run/httpd/sslcache(512000) +SSLSessionCacheTimeout 300 + +# Pseudo Random Number Generator (PRNG): +# Configure one or more sources to seed the PRNG of the +# SSL library. The seed data should be of good random quality. +# WARNING! On some platforms /dev/random blocks if not enough entropy +# is available. This means you then cannot use the /dev/random device +# because it would lead to very long connection times (as long as +# it requires to make more entropy available). But usually those +# platforms additionally provide a /dev/urandom device which doesn't +# block. So, if available, use this one instead. Read the mod_ssl User +# Manual for more details. +SSLRandomSeed startup file:/dev/urandom 256 +SSLRandomSeed connect builtin +#SSLRandomSeed startup file:/dev/random 512 +#SSLRandomSeed connect file:/dev/random 512 +#SSLRandomSeed connect file:/dev/urandom 512 + +# +# Use "SSLCryptoDevice" to enable any supported hardware +# accelerators. Use "openssl engine -v" to list supported +# engine names. NOTE: If you enable an accelerator and the +# server does not start, consult the error logs and ensure +# your accelerator is functioning properly. 
+# +SSLCryptoDevice builtin +#SSLCryptoDevice ubsec + +## +## SSL Virtual Host Context +## +# + + ServerName test-docker.dataverse.no + + SSLProxyEngine on + ProxyPass / https://test-docker.dataverse.no:443/ + ProxyPassReverse / https://test-docker.dataverse.no:443/ + + + + +# General setup for the virtual host, inherited from global configuration +#DocumentRoot "/var/www/html" +#ServerName www.example.com:443 +ServerName test-docker.dataverse.no +Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains" +# Content-Security-Policy: noen java-filer laster fra http, så denne kan +# ikke brukes. +#Header always set Content-Security-Policy "default-src https:" +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +#:443 + +# Use separate log files for the SSL virtual host; note that LogLevel +# is not inherited from httpd.conf. +ErrorLog /dev/stdout +TransferLog /dev/stdout +LogLevel warn + +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None" + +# SSL Engine Switch: +# Enable/Disable SSL for this virtual host. +SSLEngine on + +# SSL Protocol support: +# List the enable protocol levels with which clients will be able to +# connect. Disable SSLv2 access by default: +SSLProtocol all -SSLv2 -SSLv3 + +# SSL Cipher Suite: +# List the ciphers that the client is permitted to negotiate. +# See the mod_ssl documentation for a complete list. +SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA + +# Speed-optimized SSL Cipher configuration: +# If speed is your main concern (on busy HTTPS servers e.g.), +# you might want to force clients to specific, performance +# optimized ciphers. In this case, prepend those ciphers +# to the SSLCipherSuite list, and enable SSLHonorCipherOrder. +# Caveat: by giving precedence to RC4-SHA and AES128-SHA +# (as in the example below), most connections will no longer +# have perfect forward secrecy - if the server's key is +# compromised, captures of past or future traffic must be +# considered compromised, too. +#SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5 +#SSLHonorCipherOrder on + +# Server Certificate: +# Point SSLCertificateFile at a PEM encoded certificate. If +# the certificate is encrypted, then you will be prompted for a +# pass phrase. Note that a kill -HUP will prompt again. A new +# certificate can be generated using the genkey(1) command. +SSLCertificateFile /etc/pki/tls/certs/localhost.crt + +# Server Private Key: +# If the key is not combined with the certificate, use this +# directive to point at the key file. Keep in mind that if +# you've both a RSA and a DSA private key you can configure +# both in parallel (to also allow the use of DSA ciphers, etc.) +SSLCertificateKeyFile /etc/pki/tls/private/localhost.key + +# Server Certificate Chain: +# Point SSLCertificateChainFile at a file containing the +# concatenation of PEM encoded CA certificates which form the +# certificate chain for the server certificate. Alternatively +# the referenced file can be the same as SSLCertificateFile +# when the CA certificates are directly appended to the server +# certificate for convinience. 
+#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt + +# Certificate Authority (CA): +# Set the CA certificate verification path where to find CA +# certificates for client authentication or alternatively one +# huge file containing all of them (file must be PEM encoded) +#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt + +# Client Authentication (Type): +# Client certificate verification type and depth. Types are +# none, optional, require and optional_no_ca. Depth is a +# number which specifies how deeply to verify the certificate +# issuer chain before deciding the certificate is not valid. +#SSLVerifyClient require +#SSLVerifyDepth 10 + +LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so + +# +# Turn this on to support "require valid-user" rules from other +# mod_authn_* modules, and use "require shib-session" for anonymous +# session-based authorization in mod_shib. +# +ShibCompatValidUser Off + +# +# Ensures handler will be accessible. +# + + AuthType None + Require all granted + # vty + ShibRequestSetting requireSession 1 + require shib-session + + +# +# Used for example style sheet in error templates. +# + + + AuthType None + Require all granted + + Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css + + +# +# Configure the module for content. +# +# You MUST enable AuthType shibboleth for the module to process +# any requests, and there MUST be a require command as well. To +# enable Shibboleth but not specify any session/access requirements +# use "require shibboleth". +# + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require shib-session + +#ProxyPass / ajp://dataverse:8009 +#ProxyPassReverse / ajp://dataverse:8009 + +# Access Control: +# With SSLRequire you can do per-directory access control based +# on arbitrary complex boolean expressions containing server +# variable checks and other lookup directives. The syntax is a +# mixture between C and Perl. See the mod_ssl documentation +# for more details. +# +#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \ +# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \ +# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \ +# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \ +# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \ +# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/ +# + +# SSL Engine Options: +# Set various options for the SSL engine. +# o FakeBasicAuth: +# Translate the client X.509 into a Basic Authorisation. This means that +# the standard Auth/DBMAuth methods can be used for access control. The +# user name is the `one line' version of the client's X.509 certificate. +# Note that no password is obtained from the user. Every entry in the user +# file needs this password: `xxj31ZMTZzkVA'. +# o ExportCertData: +# This exports two additional environment variables: SSL_CLIENT_CERT and +# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the +# server (always existing) and the client (only existing when client +# authentication is used). This can be used to import the certificates +# into CGI scripts. +# o StdEnvVars: +# This exports the standard SSL/TLS related `SSL_*' environment variables. +# Per default this exportation is switched off for performance reasons, +# because the extraction step is an expensive operation and is usually +# useless for serving static content. So one usually enables the +# exportation for CGI and SSI requests only. +# o StrictRequire: +# This denies access when "SSLRequireSSL" or "SSLRequire" applied even +# under a "Satisfy any" situation, i.e. 
when it applies access is denied +# and no other module can change it. +# o OptRenegotiate: +# This enables optimized SSL connection renegotiation handling when SSL +# directives are used in per-directory context. +#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire + + SSLOptions +StdEnvVars + + + SSLOptions +StdEnvVars + + +# SSL Protocol Adjustments: +# The safe and default but still SSL/TLS standard compliant shutdown +# approach is that mod_ssl sends the close notify alert but doesn't wait for +# the close notify alert from client. When you need a different shutdown +# approach you can use one of the following variables: +# o ssl-unclean-shutdown: +# This forces an unclean shutdown when the connection is closed, i.e. no +# SSL close notify alert is send or allowed to received. This violates +# the SSL/TLS standard but is needed for some brain-dead browsers. Use +# this when you receive I/O errors because of the standard approach where +# mod_ssl sends the close notify alert. +# o ssl-accurate-shutdown: +# This forces an accurate shutdown when the connection is closed, i.e. a +# SSL close notify alert is send and mod_ssl waits for the close notify +# alert of the client. This is 100% SSL/TLS standard compliant, but in +# practice often causes hanging connections with brain-dead browsers. Use +# this only for browsers where you know that their SSL implementation +# works correctly. +# Notice: Most problems of broken clients are also related to the HTTP +# keep-alive facility, so you usually additionally want to disable +# keep-alive for those clients, too. Use variable "nokeepalive" for this. +# Similarly, one has to force some clients to use HTTP/1.0 to workaround +# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and +# "force-response-1.0" for this. +BrowserMatch "MSIE [2-5]" \ + nokeepalive ssl-unclean-shutdown \ + downgrade-1.0 force-response-1.0 + +# Per-Server Logging: +# The home of a custom SSL log file. Use this when you want a +# compact non-error SSL logfile on a virtual host basis. 
+CustomLog /dev/stdout \ + "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b" + +ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i" + + diff --git a/distros/dataverse.no/configs/domain.xml b/distros/dataverse.no/configs/domain.xml new file mode 100644 index 0000000..818f4e3 --- /dev/null +++ b/distros/dataverse.no/configs/domain.xml @@ -0,0 +1,649 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + log-notifier + + + + + + + + + + log-notifier + + + + log-notifier + + + log-notifier + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + -server + [9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED + [9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED + [9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED + [9|]--add-opens=java.base/java.lang=ALL-UNNAMED + [9|]--add-opens=java.base/java.net=ALL-UNNAMED + [9|]--add-opens=java.base/java.nio=ALL-UNNAMED + [9|]--add-opens=java.base/java.util=ALL-UNNAMED + [9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED + [9|]--add-opens=java.management/sun.management=ALL-UNNAMED + [9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED + [9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED + [9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED + [9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED + [9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED + -XX:NewRatio=2 + -XX:+UnlockDiagnosticVMOptions + -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory + -Djava.awt.headless=true + -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf + -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy + -Djavax.management.builder.initial=com.sun.enterprise.v3.admin.AppServerMBeanServerBuilder + -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as + -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks + -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks + -Djavax.xml.accessExternalSchema=all + -Djdbc.drivers=org.h2.Driver + -Djdk.corba.allowOutputStreamSubclass=true + -Djdk.tls.rejectClientInitiatedRenegotiation=true + -DANTLR_USE_DIRECT_CLASS_LOADING=true + -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.shell.remote,org.apache.felix.fileinstall + -Dosgi.shell.telnet.port=6666 + -Dosgi.shell.telnet.maxconn=1 + -Dosgi.shell.telnet.ip=127.0.0.1 + -Dgosh.args=--nointeractive + -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/ + -Dfelix.fileinstall.poll=5000 + -Dfelix.fileinstall.log.level=2 + -Dfelix.fileinstall.bundles.new.start=true + -Dfelix.fileinstall.bundles.startTransient=true + -Dfelix.fileinstall.disableConfigSave=false + -Dcom.ctc.wstx.returnNullForDefaultNamespace=true + -Dorg.glassfish.grizzly.DEFAULT_MEMORY_MANAGER=org.glassfish.grizzly.memory.HeapMemoryManager + -Dorg.glassfish.grizzly.nio.DefaultSelectorHandler.force-selector-spin-detection=true + 
-Dorg.jboss.weld.serialization.beanIdentifierIndexOptimization=false + [|8]-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed + [|8]-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext + [1.8.0|1.8.0u120]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.6.jar + [1.8.0u121|1.8.0u160]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.7.jar + [1.8.0u161|1.8.0u190]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.jar + [1.8.0u191|1.8.0u250]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.1.jar + [1.8.0u251|]-Xbootclasspath/a:${com.sun.aas.installRoot}/lib/grizzly-npn-api.jar + [Azul-1.8.0u222|1.8.0u260]-XX:+UseOpenJSSE + -XX:+UseContainerSupport + -XX:MaxRAMPercentage=${ENV=MEM_MAX_RAM_PERCENTAGE} + -Xss${ENV=MEM_XSS} + -Ddataverse.files.S3.type=s3 + -Ddataverse.files.S3.label=S3 + -Ddataverse.files.S3.bucket-name=2002-green-dataversenotest1 + -Ddataverse.files.S3.download-redirect=true + -Ddataverse.files.S3.url-expiration-minutes=120 + -Ddataverse.files.S3.connection-pool-size=4096 + -Ddataverse.files.storage-driver-id=S3 + -Ddataverse.files.S3.profile=cloudian + -Ddataverse.files.S3.custom-endpoint-url=https://s3-oslo.educloud.no + -Ddataverse.files.file.type=file + -Ddataverse.files.file.label=file + -Ddataverse.files.file.directory=/data + -Ddoi.username=BIBSYS.UIT-ORD + -Ddoi.password=${ALIAS=doi_password_alias} + -Ddoi.dataciterestapiurlstring=https://api.test.datacite.org + -Ddoi.baseurlstring=https://mds.test.datacite.org + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + log-notifier + + + + + + + log-notifier + + + + log-notifier + + + log-notifier + + + + -server + [9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED + [9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED + [9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED + [9|]--add-opens=java.base/java.lang=ALL-UNNAMED + [9|]--add-opens=java.base/java.net=ALL-UNNAMED + [9|]--add-opens=java.base/java.nio=ALL-UNNAMED + [9|]--add-opens=java.base/java.util=ALL-UNNAMED + [9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED + [9|]--add-opens=java.management/sun.management=ALL-UNNAMED + [9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED + [9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED + [9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED + [9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED + [9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED + -Xmx512m + -XX:NewRatio=2 + -XX:+UnlockDiagnosticVMOptions + -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory + -Djava.awt.headless=true + -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf + -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy + -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as + -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks + -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks + 
-Djdbc.drivers=org.h2.Driver + -Djdk.corba.allowOutputStreamSubclass=true + -Djdk.tls.rejectClientInitiatedRenegotiation=true + -DANTLR_USE_DIRECT_CLASS_LOADING=true + -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall + -Dosgi.shell.telnet.port=${OSGI_SHELL_TELNET_PORT} + -Dosgi.shell.telnet.maxconn=1 + -Dosgi.shell.telnet.ip=127.0.0.1 + -Dgosh.args=--nointeractive + -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/ + -Dfelix.fileinstall.poll=5000 + -Dfelix.fileinstall.log.level=3 + -Dfelix.fileinstall.bundles.new.start=true + -Dfelix.fileinstall.bundles.startTransient=true + -Dfelix.fileinstall.disableConfigSave=false + -Dorg.glassfish.grizzly.DEFAULT_MEMORY_MANAGER=org.glassfish.grizzly.memory.HeapMemoryManager + -Dorg.glassfish.grizzly.nio.DefaultSelectorHandler.force-selector-spin-detection=true + -Dorg.jboss.weld.serialization.beanIdentifierIndexOptimization=false + [|8]-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed + [|8]-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext + [1.8.0|1.8.0u120]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.6.jar + [1.8.0u121|1.8.0u160]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.7.jar + [1.8.0u161|1.8.0u190]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.jar + [1.8.0u191|1.8.0u250]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.1.jar + [1.8.0u251|]-Xbootclasspath/a:${com.sun.aas.installRoot}/lib/grizzly-npn-api.jar + [Azul-1.8.0u222|1.8.0u260]-XX:+UseOpenJSSE + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/distros/dataverse.no/configs/htdocs-ssl/index.html b/distros/dataverse.no/configs/htdocs-ssl/index.html new file mode 100755 index 0000000..6b233f9 --- /dev/null +++ b/distros/dataverse.no/configs/htdocs-ssl/index.html @@ -0,0 +1 @@ +
<html><body><h1>It works with SSL!</h1></body></html>
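A one-line smoke test for this page, as a sketch only: it assumes httpd publishes HTTPS on 443 for test-docker.dataverse.no, serves the htdocs-ssl DocumentRoot at / (rather than proxying / to Dataverse), and uses the self-signed localhost.crt configured in these files, hence curl -k:

# Sketch: -k accepts the self-signed certificate; hostname and port are assumptions
curl -ks https://test-docker.dataverse.no/ | grep -q 'It works with SSL!' \
  && echo 'SSL test page OK' || echo 'SSL test page not served'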
diff --git a/distros/dataverse.no/configs/htdocs/index.html b/distros/dataverse.no/configs/htdocs/index.html new file mode 100755 index 0000000..f5f1c37 --- /dev/null +++ b/distros/dataverse.no/configs/htdocs/index.html @@ -0,0 +1 @@ +
<html><body><h1>It works!</h1></body></html>
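The plain-HTTP counterpart is similar; note that in the http-ssl*.conf files below, port 80 carries a RewriteRule redirecting to HTTPS, so this sketch only sees the page when that redirect is not in effect (hostname and a published port 80 are assumptions):

# Sketch: fetch the non-SSL test page directly, without following redirects
curl -s http://test-docker.dataverse.no/ | grep -q 'It works!' \
  && echo 'HTTP test page OK' || echo 'HTTP test page not served'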
diff --git a/distros/dataverse.no/configs/http-ssl-test.conf b/distros/dataverse.no/configs/http-ssl-test.conf new file mode 100755 index 0000000..d33c651 --- /dev/null +++ b/distros/dataverse.no/configs/http-ssl-test.conf @@ -0,0 +1,322 @@ +# +# When we also provide SSL we have to listen to the +# the HTTPS port in addition. +# +Listen 9443 https + +## +## SSL Global Context +## +## All SSL configuration in this context applies both to +## the main server and all SSL-enabled virtual hosts. +## + +# Pass Phrase Dialog: +# Configure the pass phrase gathering process. +# The filtering dialog program (`builtin' is a internal +# terminal dialog) has to provide the pass phrase on stdout. +SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog + +# Inter-Process Session Cache: +# Configure the SSL Session Cache: First the mechanism +# to use and second the expiring timeout (in seconds). +SSLSessionCache shmcb:/run/httpd/sslcache(512000) +SSLSessionCacheTimeout 300 + +# Pseudo Random Number Generator (PRNG): +# Configure one or more sources to seed the PRNG of the +# SSL library. The seed data should be of good random quality. +# WARNING! On some platforms /dev/random blocks if not enough entropy +# is available. This means you then cannot use the /dev/random device +# because it would lead to very long connection times (as long as +# it requires to make more entropy available). But usually those +# platforms additionally provide a /dev/urandom device which doesn't +# block. So, if available, use this one instead. Read the mod_ssl User +# Manual for more details. +SSLRandomSeed startup file:/dev/urandom 256 +SSLRandomSeed connect builtin +#SSLRandomSeed startup file:/dev/random 512 +#SSLRandomSeed connect file:/dev/random 512 +#SSLRandomSeed connect file:/dev/urandom 512 + +# +# Use "SSLCryptoDevice" to enable any supported hardware +# accelerators. Use "openssl engine -v" to list supported +# engine names. NOTE: If you enable an accelerator and the +# server does not start, consult the error logs and ensure +# your accelerator is functioning properly. +# +SSLCryptoDevice builtin +#SSLCryptoDevice ubsec + +## +## SSL Virtual Host Context +## +# + + ServerName test.dataverse.no + DocumentRoot /var/www/html + #ErrorLog /var/logs/http-error_log + #CustomLog /var/logs/http-access_log combined env=!monitor + + Header always set X-Frame-Options "SAMEORIGIN" + Header always set X-XSS-Protection "1; mode=block" + Header always set X-Content-Type-Options "nosniff" + + + Options None + Require all granted + + + RewriteEngine On + RewriteCond %{HTTPS} !=on + RewriteRule ^/?(.*) https://%{SERVER_NAME}/$1 [R,L] + + + + + +# General setup for the virtual host, inherited from global configuration +#DocumentRoot "/var/www/html" +#ServerName www.example.com:443 +ServerName test.dataverse.no +Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains" +# Content-Security-Policy: noen java-filer laster fra http, så denne kan +# ikke brukes. +#Header always set Content-Security-Policy "default-src https:" +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +#:443 + +# Use separate log files for the SSL virtual host; note that LogLevel +# is not inherited from httpd.conf. 
+ErrorLog /dev/stdout +TransferLog /dev/stdout +LoadModule dumpio_module modules/mod_dumpio.so + + DumpIOInput On + DumpIOOutput On + +LogLevel dumpio:trace7 + +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None" + +# SSL Engine Switch: +# Enable/Disable SSL for this virtual host. +SSLEngine on + +# SSL Protocol support: +# List the enable protocol levels with which clients will be able to +# connect. Disable SSLv2 access by default: +SSLProtocol all -SSLv2 -SSLv3 + +# SSL Cipher Suite: +# List the ciphers that the client is permitted to negotiate. +# See the mod_ssl documentation for a complete list. +SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA + +# Speed-optimized SSL Cipher configuration: +# If speed is your main concern (on busy HTTPS servers e.g.), +# you might want to force clients to specific, performance +# optimized ciphers. In this case, prepend those ciphers +# to the SSLCipherSuite list, and enable SSLHonorCipherOrder. +# Caveat: by giving precedence to RC4-SHA and AES128-SHA +# (as in the example below), most connections will no longer +# have perfect forward secrecy - if the server's key is +# compromised, captures of past or future traffic must be +# considered compromised, too. +#SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5 +#SSLHonorCipherOrder on + +# Server Certificate: +# Point SSLCertificateFile at a PEM encoded certificate. If +# the certificate is encrypted, then you will be prompted for a +# pass phrase. Note that a kill -HUP will prompt again. A new +# certificate can be generated using the genkey(1) command. +# vty +SSLCertificateFile /etc/pki/tls/certs/localhost.crt + +# Server Private Key: +# If the key is not combined with the certificate, use this +# directive to point at the key file. Keep in mind that if +# you've both a RSA and a DSA private key you can configure +# both in parallel (to also allow the use of DSA ciphers, etc.) +# #vty +SSLCertificateKeyFile /etc/pki/tls/private/localhost.key + +# Server Certificate Chain: +# Point SSLCertificateChainFile at a file containing the +# concatenation of PEM encoded CA certificates which form the +# certificate chain for the server certificate. Alternatively +# the referenced file can be the same as SSLCertificateFile +# when the CA certificates are directly appended to the server +# certificate for convinience. +#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt + +# Certificate Authority (CA): +# Set the CA certificate verification path where to find CA +# certificates for client authentication or alternatively one +# huge file containing all of them (file must be PEM encoded) +#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt + +# Client Authentication (Type): +# Client certificate verification type and depth. Types are +# none, optional, require and optional_no_ca. Depth is a +# number which specifies how deeply to verify the certificate +# issuer chain before deciding the certificate is not valid. +#SSLVerifyClient require +#SSLVerifyDepth 10 + +LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so + +# +# Turn this on to support "require valid-user" rules from other +# mod_authn_* modules, and use "require shib-session" for anonymous +# session-based authorization in mod_shib. +# +ShibCompatValidUser Off + +# +# Ensures handler will be accessible. 
+# + + AuthType None + Require all granted + # vty + ShibRequestSetting requireSession 1 + require shib-session + + + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require valid-user + + + + +# +# Used for example style sheet in error templates. +# + + + AuthType None + Require all granted + + Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css + + +# +# Configure the module for content. +# +# You MUST enable AuthType shibboleth for the module to process +# any requests, and there MUST be a require command as well. To +# enable Shibboleth but not specify any session/access requirements +# use "require shibboleth". +# + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require shib-session + +ProxyPassInterpolateEnv On +ProxyPassMatch ^/Shibboleth.sso ! +ProxyPassMatch ^/shibboleth-ds ! +ProxyPass / ajp://dataverse:8009/ interpolate +ProxyPassReverse / ajp://dataverse:8009/ interpolate +ProxyPassReverseCookieDomain "dataverse" "test.dataverse.no" interpolate +ProxyPassReverseCookiePath "/" "/" interpolate + +# Access Control: +# With SSLRequire you can do per-directory access control based +# on arbitrary complex boolean expressions containing server +# variable checks and other lookup directives. The syntax is a +# mixture between C and Perl. See the mod_ssl documentation +# for more details. +# +#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \ +# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \ +# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \ +# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \ +# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \ +# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/ +# + +# SSL Engine Options: +# Set various options for the SSL engine. +# o FakeBasicAuth: +# Translate the client X.509 into a Basic Authorisation. This means that +# the standard Auth/DBMAuth methods can be used for access control. The +# user name is the `one line' version of the client's X.509 certificate. +# Note that no password is obtained from the user. Every entry in the user +# file needs this password: `xxj31ZMTZzkVA'. +# o ExportCertData: +# This exports two additional environment variables: SSL_CLIENT_CERT and +# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the +# server (always existing) and the client (only existing when client +# authentication is used). This can be used to import the certificates +# into CGI scripts. +# o StdEnvVars: +# This exports the standard SSL/TLS related `SSL_*' environment variables. +# Per default this exportation is switched off for performance reasons, +# because the extraction step is an expensive operation and is usually +# useless for serving static content. So one usually enables the +# exportation for CGI and SSI requests only. +# o StrictRequire: +# This denies access when "SSLRequireSSL" or "SSLRequire" applied even +# under a "Satisfy any" situation, i.e. when it applies access is denied +# and no other module can change it. +# o OptRenegotiate: +# This enables optimized SSL connection renegotiation handling when SSL +# directives are used in per-directory context. +#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire + + SSLOptions +StdEnvVars + + + SSLOptions +StdEnvVars + + +# SSL Protocol Adjustments: +# The safe and default but still SSL/TLS standard compliant shutdown +# approach is that mod_ssl sends the close notify alert but doesn't wait for +# the close notify alert from client. 
When you need a different shutdown +# approach you can use one of the following variables: +# o ssl-unclean-shutdown: +# This forces an unclean shutdown when the connection is closed, i.e. no +# SSL close notify alert is send or allowed to received. This violates +# the SSL/TLS standard but is needed for some brain-dead browsers. Use +# this when you receive I/O errors because of the standard approach where +# mod_ssl sends the close notify alert. +# o ssl-accurate-shutdown: +# This forces an accurate shutdown when the connection is closed, i.e. a +# SSL close notify alert is send and mod_ssl waits for the close notify +# alert of the client. This is 100% SSL/TLS standard compliant, but in +# practice often causes hanging connections with brain-dead browsers. Use +# this only for browsers where you know that their SSL implementation +# works correctly. +# Notice: Most problems of broken clients are also related to the HTTP +# keep-alive facility, so you usually additionally want to disable +# keep-alive for those clients, too. Use variable "nokeepalive" for this. +# Similarly, one has to force some clients to use HTTP/1.0 to workaround +# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and +# "force-response-1.0" for this. +BrowserMatch "MSIE [2-5]" \ + nokeepalive ssl-unclean-shutdown \ + downgrade-1.0 force-response-1.0 + +# Per-Server Logging: +# The home of a custom SSL log file. Use this when you want a +# compact non-error SSL logfile on a virtual host basis. +#CustomLog /dev/stdout \ +# "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b" +Customlog /var/log/httpd/access.log combined +ErrorLog /var/log/httpd/error.log + +ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i" + + diff --git a/distros/dataverse.no/configs/http-ssl.conf b/distros/dataverse.no/configs/http-ssl.conf new file mode 100755 index 0000000..b7e7371 --- /dev/null +++ b/distros/dataverse.no/configs/http-ssl.conf @@ -0,0 +1,349 @@ +# +# When we also provide SSL we have to listen to the +# the HTTPS port in addition. +# +Listen 9443 https +TimeOut 600 +LimitRequestBody 0 + +## +## SSL Global Context +## +## All SSL configuration in this context applies both to +## the main server and all SSL-enabled virtual hosts. +## + +# Pass Phrase Dialog: +# Configure the pass phrase gathering process. +# The filtering dialog program (`builtin' is a internal +# terminal dialog) has to provide the pass phrase on stdout. +SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog + +# Inter-Process Session Cache: +# Configure the SSL Session Cache: First the mechanism +# to use and second the expiring timeout (in seconds). +SSLSessionCache shmcb:/run/httpd/sslcache(512000) +SSLSessionCacheTimeout 300 + +# Pseudo Random Number Generator (PRNG): +# Configure one or more sources to seed the PRNG of the +# SSL library. The seed data should be of good random quality. +# WARNING! On some platforms /dev/random blocks if not enough entropy +# is available. This means you then cannot use the /dev/random device +# because it would lead to very long connection times (as long as +# it requires to make more entropy available). But usually those +# platforms additionally provide a /dev/urandom device which doesn't +# block. So, if available, use this one instead. Read the mod_ssl User +# Manual for more details. 
+SSLRandomSeed startup file:/dev/urandom 256 +SSLRandomSeed connect builtin +#SSLRandomSeed startup file:/dev/random 512 +#SSLRandomSeed connect file:/dev/random 512 +#SSLRandomSeed connect file:/dev/urandom 512 + +# +# Use "SSLCryptoDevice" to enable any supported hardware +# accelerators. Use "openssl engine -v" to list supported +# engine names. NOTE: If you enable an accelerator and the +# server does not start, consult the error logs and ensure +# your accelerator is functioning properly. +# +SSLCryptoDevice builtin +#SSLCryptoDevice ubsec + +## +## SSL Virtual Host Context +## +# + + ServerName test-docker.dataverse.no + DocumentRoot /var/www/html + #ErrorLog /var/logs/http-error_log + #CustomLog /var/logs/http-access_log combined env=!monitor + + Header always set X-Frame-Options "SAMEORIGIN" + Header always set X-XSS-Protection "1; mode=block" + Header always set X-Content-Type-Options "nosniff" + + + Options None + Require all granted + + + RewriteEngine On + RewriteCond %{HTTPS} !=on + RewriteRule ^/?(.*) https://%{SERVER_NAME}/$1 [R,L] + + + + + +# General setup for the virtual host, inherited from global configuration +#DocumentRoot "/var/www/html" +#ServerName www.example.com:443 +ServerName test-docker.dataverse.no +Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains" +# Content-Security-Policy: noen java-filer laster fra http, så denne kan +# ikke brukes. +#Header always set Content-Security-Policy "default-src https:" +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +#:443 + +# Use separate log files for the SSL virtual host; note that LogLevel +# is not inherited from httpd.conf. +ErrorLog /dev/stdout +TransferLog /dev/stdout +#LoadModule dumpio_module modules/mod_dumpio.so +# +# DumpIOInput On +# DumpIOOutput On +# +#LogLevel dumpio:trace7 + +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None" + +# SSL Engine Switch: +# Enable/Disable SSL for this virtual host. +SSLEngine on + +# SSL Protocol support: +# List the enable protocol levels with which clients will be able to +# connect. Disable SSLv2 access by default: +SSLProtocol all -SSLv2 -SSLv3 + +# SSL Cipher Suite: +# List the ciphers that the client is permitted to negotiate. +# See the mod_ssl documentation for a complete list. +SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA + +# Speed-optimized SSL Cipher configuration: +# If speed is your main concern (on busy HTTPS servers e.g.), +# you might want to force clients to specific, performance +# optimized ciphers. In this case, prepend those ciphers +# to the SSLCipherSuite list, and enable SSLHonorCipherOrder. +# Caveat: by giving precedence to RC4-SHA and AES128-SHA +# (as in the example below), most connections will no longer +# have perfect forward secrecy - if the server's key is +# compromised, captures of past or future traffic must be +# considered compromised, too. +#SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5 +#SSLHonorCipherOrder on + +# Server Certificate: +# Point SSLCertificateFile at a PEM encoded certificate. If +# the certificate is encrypted, then you will be prompted for a +# pass phrase. Note that a kill -HUP will prompt again. A new +# certificate can be generated using the genkey(1) command. 
+# vty +SSLCertificateFile /etc/pki/tls/certs/localhost.crt + +# Server Private Key: +# If the key is not combined with the certificate, use this +# directive to point at the key file. Keep in mind that if +# you've both a RSA and a DSA private key you can configure +# both in parallel (to also allow the use of DSA ciphers, etc.) +# #vty +SSLCertificateKeyFile /etc/pki/tls/private/localhost.key + +# Server Certificate Chain: +# Point SSLCertificateChainFile at a file containing the +# concatenation of PEM encoded CA certificates which form the +# certificate chain for the server certificate. Alternatively +# the referenced file can be the same as SSLCertificateFile +# when the CA certificates are directly appended to the server +# certificate for convinience. +#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt + +# Certificate Authority (CA): +# Set the CA certificate verification path where to find CA +# certificates for client authentication or alternatively one +# huge file containing all of them (file must be PEM encoded) +#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt + +# Client Authentication (Type): +# Client certificate verification type and depth. Types are +# none, optional, require and optional_no_ca. Depth is a +# number which specifies how deeply to verify the certificate +# issuer chain before deciding the certificate is not valid. +#SSLVerifyClient require +#SSLVerifyDepth 10 + +LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so + +# +# Turn this on to support "require valid-user" rules from other +# mod_authn_* modules, and use "require shib-session" for anonymous +# session-based authorization in mod_shib. +# +ShibCompatValidUser Off + +# +# Ensures handler will be accessible. +# + + AuthType None + Require all granted + # vty + ShibRequestSetting requireSession 1 + require shib-session + + + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require valid-user + + + + +# +# Used for example style sheet in error templates. +# + + + AuthType None + Require all granted + + Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css + + +# +# Configure the module for content. +# +# You MUST enable AuthType shibboleth for the module to process +# any requests, and there MUST be a require command as well. To +# enable Shibboleth but not specify any session/access requirements +# use "require shibboleth". +# + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require shib-session + +ProxyPassInterpolateEnv On +ProxyPassMatch ^/Shibboleth.sso ! +ProxyPassMatch ^/shibboleth-ds ! +ProxyPassMatch ^/phpPgAdmin ! +ProxyPassMatch ^/nav ! +ProxyPassMatch ^/minio ! +ProxyPass / ajp://dataverse:8009/ interpolate +ProxyPassReverse / ajp://dataverse:8009/ interpolate +ProxyPassReverseCookieDomain "dataverse" "test.dataverse.no" interpolate +ProxyPassReverseCookiePath "/" "/" interpolate + + + #AuthType Basic + #AuthBasicProvider ldap + AuthName "AD brukernavn og passord" + ProxyPass http://phppgadmin/ + #http://pgadmin_container + ProxyPassReverse http://phppgadmin/ + #http://pgadmin_container + #AuthLDAPURL "ldaps://ldap.uit.no/cn=people,dc=uit,dc=no?uid" + #Require ldap-user lss734 ood000 pco000 kni006 + + + + ProxyPass http://minio:9000/ + ProxyPassReverse http://minio:9000/ + + + + ProxyPass http://nav/ + ProxyPassReverse http://nav/ + + +# Access Control: +# With SSLRequire you can do per-directory access control based +# on arbitrary complex boolean expressions containing server +# variable checks and other lookup directives. 
The syntax is a +# mixture between C and Perl. See the mod_ssl documentation +# for more details. +# +#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \ +# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \ +# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \ +# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \ +# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \ +# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/ +# + +# SSL Engine Options: +# Set various options for the SSL engine. +# o FakeBasicAuth: +# Translate the client X.509 into a Basic Authorisation. This means that +# the standard Auth/DBMAuth methods can be used for access control. The +# user name is the `one line' version of the client's X.509 certificate. +# Note that no password is obtained from the user. Every entry in the user +# file needs this password: `xxj31ZMTZzkVA'. +# o ExportCertData: +# This exports two additional environment variables: SSL_CLIENT_CERT and +# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the +# server (always existing) and the client (only existing when client +# authentication is used). This can be used to import the certificates +# into CGI scripts. +# o StdEnvVars: +# This exports the standard SSL/TLS related `SSL_*' environment variables. +# Per default this exportation is switched off for performance reasons, +# because the extraction step is an expensive operation and is usually +# useless for serving static content. So one usually enables the +# exportation for CGI and SSI requests only. +# o StrictRequire: +# This denies access when "SSLRequireSSL" or "SSLRequire" applied even +# under a "Satisfy any" situation, i.e. when it applies access is denied +# and no other module can change it. +# o OptRenegotiate: +# This enables optimized SSL connection renegotiation handling when SSL +# directives are used in per-directory context. +#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire + + SSLOptions +StdEnvVars + + + SSLOptions +StdEnvVars + + +# SSL Protocol Adjustments: +# The safe and default but still SSL/TLS standard compliant shutdown +# approach is that mod_ssl sends the close notify alert but doesn't wait for +# the close notify alert from client. When you need a different shutdown +# approach you can use one of the following variables: +# o ssl-unclean-shutdown: +# This forces an unclean shutdown when the connection is closed, i.e. no +# SSL close notify alert is send or allowed to received. This violates +# the SSL/TLS standard but is needed for some brain-dead browsers. Use +# this when you receive I/O errors because of the standard approach where +# mod_ssl sends the close notify alert. +# o ssl-accurate-shutdown: +# This forces an accurate shutdown when the connection is closed, i.e. a +# SSL close notify alert is send and mod_ssl waits for the close notify +# alert of the client. This is 100% SSL/TLS standard compliant, but in +# practice often causes hanging connections with brain-dead browsers. Use +# this only for browsers where you know that their SSL implementation +# works correctly. +# Notice: Most problems of broken clients are also related to the HTTP +# keep-alive facility, so you usually additionally want to disable +# keep-alive for those clients, too. Use variable "nokeepalive" for this. +# Similarly, one has to force some clients to use HTTP/1.0 to workaround +# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and +# "force-response-1.0" for this. 
+BrowserMatch "MSIE [2-5]" \ + nokeepalive ssl-unclean-shutdown \ + downgrade-1.0 force-response-1.0 + +# Per-Server Logging: +# The home of a custom SSL log file. Use this when you want a +# compact non-error SSL logfile on a virtual host basis. +#CustomLog /dev/stdout \ +# "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b" +Customlog /var/log/httpd/access.log combined +ErrorLog /var/log/httpd/error.log + +ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i" + + diff --git a/distros/dataverse.no/configs/httpd.conf b/distros/dataverse.no/configs/httpd.conf new file mode 100755 index 0000000..be8e2bf --- /dev/null +++ b/distros/dataverse.no/configs/httpd.conf @@ -0,0 +1,551 @@ +# +# This is the main Apache HTTP server configuration file. It contains the +# configuration directives that give the server its instructions. +# See for detailed information. +# In particular, see +# +# for a discussion of each configuration directive. +# +# Do NOT simply read the instructions in here without understanding +# what they do. They're here only as hints or reminders. If you are unsure +# consult the online docs. You have been warned. +# +# Configuration and logfile names: If the filenames you specify for many +# of the server's control files begin with "/" (or "drive:/" for Win32), the +# server will use that explicit path. If the filenames do *not* begin +# with "/", the value of ServerRoot is prepended -- so "logs/access_log" +# with ServerRoot set to "/usr/local/apache2" will be interpreted by the +# server as "/usr/local/apache2/logs/access_log", whereas "/logs/access_log" +# will be interpreted as '/logs/access_log'. + +# +# ServerRoot: The top of the directory tree under which the server's +# configuration, error, and log files are kept. +# +# Do not add a slash at the end of the directory path. If you point +# ServerRoot at a non-local disk, be sure to specify a local disk on the +# Mutex directive, if file-based mutexes are used. If you wish to share the +# same ServerRoot for multiple httpd daemons, you will need to change at +# least PidFile. +# +ServerRoot "/usr/local/apache2" + +# +# Mutex: Allows you to set the mutex mechanism and mutex file directory +# for individual mutexes, or change the global defaults +# +# Uncomment and change the directory if mutexes are file-based and the default +# mutex file directory is not on a local disk or is not appropriate for some +# other reason. +# +# Mutex default:logs + +# +# Listen: Allows you to bind Apache to specific IP addresses and/or +# ports, instead of the default. See also the +# directive. +# +# Change this to Listen on specific IP addresses as shown below to +# prevent Apache from glomming onto all bound IP addresses. +# +#Listen 12.34.56.78:80 +Listen 80 + +# +# Dynamic Shared Object (DSO) Support +# +# To be able to use the functionality of a module which was built as a DSO you +# have to place corresponding `LoadModule' lines at this location so the +# directives contained in it are actually available _before_ they are used. +# Statically compiled modules (those listed by `httpd -l') do not need +# to be loaded here. 
+# +# Example: +# LoadModule foo_module modules/mod_foo.so +# +LoadModule mpm_event_module modules/mod_mpm_event.so +#LoadModule mpm_prefork_module modules/mod_mpm_prefork.so +#LoadModule mpm_worker_module modules/mod_mpm_worker.so +LoadModule authn_file_module modules/mod_authn_file.so +#LoadModule authn_dbm_module modules/mod_authn_dbm.so +#LoadModule authn_anon_module modules/mod_authn_anon.so +#LoadModule authn_dbd_module modules/mod_authn_dbd.so +#LoadModule authn_socache_module modules/mod_authn_socache.so +LoadModule authn_core_module modules/mod_authn_core.so +LoadModule authz_host_module modules/mod_authz_host.so +LoadModule authz_groupfile_module modules/mod_authz_groupfile.so +LoadModule authz_user_module modules/mod_authz_user.so +#LoadModule authz_dbm_module modules/mod_authz_dbm.so +#LoadModule authz_owner_module modules/mod_authz_owner.so +#LoadModule authz_dbd_module modules/mod_authz_dbd.so +LoadModule authz_core_module modules/mod_authz_core.so +#LoadModule authnz_ldap_module modules/mod_authnz_ldap.so +#LoadModule authnz_fcgi_module modules/mod_authnz_fcgi.so +LoadModule access_compat_module modules/mod_access_compat.so +LoadModule auth_basic_module modules/mod_auth_basic.so +#LoadModule auth_form_module modules/mod_auth_form.so +#LoadModule auth_digest_module modules/mod_auth_digest.so +#LoadModule allowmethods_module modules/mod_allowmethods.so +#LoadModule isapi_module modules/mod_isapi.so +#LoadModule file_cache_module modules/mod_file_cache.so +#LoadModule cache_module modules/mod_cache.so +#LoadModule cache_disk_module modules/mod_cache_disk.so +#LoadModule cache_socache_module modules/mod_cache_socache.so +LoadModule socache_shmcb_module modules/mod_socache_shmcb.so +#LoadModule socache_dbm_module modules/mod_socache_dbm.so +#LoadModule socache_memcache_module modules/mod_socache_memcache.so +#LoadModule socache_redis_module modules/mod_socache_redis.so +#LoadModule watchdog_module modules/mod_watchdog.so +#LoadModule macro_module modules/mod_macro.so +#LoadModule dbd_module modules/mod_dbd.so +#LoadModule bucketeer_module modules/mod_bucketeer.so +#LoadModule dumpio_module modules/mod_dumpio.so +#LoadModule echo_module modules/mod_echo.so +#LoadModule example_hooks_module modules/mod_example_hooks.so +#LoadModule case_filter_module modules/mod_case_filter.so +#LoadModule case_filter_in_module modules/mod_case_filter_in.so +#LoadModule example_ipc_module modules/mod_example_ipc.so +#LoadModule buffer_module modules/mod_buffer.so +#LoadModule data_module modules/mod_data.so +#LoadModule ratelimit_module modules/mod_ratelimit.so +LoadModule reqtimeout_module modules/mod_reqtimeout.so +#LoadModule ext_filter_module modules/mod_ext_filter.so +#LoadModule request_module modules/mod_request.so +#LoadModule include_module modules/mod_include.so +LoadModule filter_module modules/mod_filter.so +#LoadModule reflector_module modules/mod_reflector.so +#LoadModule substitute_module modules/mod_substitute.so +#LoadModule sed_module modules/mod_sed.so +#LoadModule charset_lite_module modules/mod_charset_lite.so +#LoadModule deflate_module modules/mod_deflate.so +#LoadModule xml2enc_module modules/mod_xml2enc.so +#LoadModule proxy_html_module modules/mod_proxy_html.so +#LoadModule brotli_module modules/mod_brotli.so +LoadModule mime_module modules/mod_mime.so +#LoadModule ldap_module modules/mod_ldap.so +LoadModule log_config_module modules/mod_log_config.so +#LoadModule log_debug_module modules/mod_log_debug.so +#LoadModule log_forensic_module modules/mod_log_forensic.so 
+#LoadModule logio_module modules/mod_logio.so +#LoadModule lua_module modules/mod_lua.so +LoadModule env_module modules/mod_env.so +#LoadModule mime_magic_module modules/mod_mime_magic.so +#LoadModule cern_meta_module modules/mod_cern_meta.so +#LoadModule expires_module modules/mod_expires.so +LoadModule headers_module modules/mod_headers.so +#LoadModule ident_module modules/mod_ident.so +#LoadModule usertrack_module modules/mod_usertrack.so +#LoadModule unique_id_module modules/mod_unique_id.so +LoadModule setenvif_module modules/mod_setenvif.so +LoadModule version_module modules/mod_version.so +#LoadModule remoteip_module modules/mod_remoteip.so +#LoadModule proxy_module modules/mod_proxy.so +#LoadModule proxy_connect_module modules/mod_proxy_connect.so +#LoadModule proxy_ftp_module modules/mod_proxy_ftp.so +#LoadModule proxy_http_module modules/mod_proxy_http.so +#LoadModule proxy_fcgi_module modules/mod_proxy_fcgi.so +#LoadModule proxy_scgi_module modules/mod_proxy_scgi.so +#LoadModule proxy_uwsgi_module modules/mod_proxy_uwsgi.so +#LoadModule proxy_fdpass_module modules/mod_proxy_fdpass.so +#LoadModule proxy_wstunnel_module modules/mod_proxy_wstunnel.so +#LoadModule proxy_ajp_module modules/mod_proxy_ajp.so +#LoadModule proxy_balancer_module modules/mod_proxy_balancer.so +#LoadModule proxy_express_module modules/mod_proxy_express.so +#LoadModule proxy_hcheck_module modules/mod_proxy_hcheck.so +#LoadModule session_module modules/mod_session.so +#LoadModule session_cookie_module modules/mod_session_cookie.so +#LoadModule session_crypto_module modules/mod_session_crypto.so +#LoadModule session_dbd_module modules/mod_session_dbd.so +#LoadModule slotmem_shm_module modules/mod_slotmem_shm.so +#LoadModule slotmem_plain_module modules/mod_slotmem_plain.so +LoadModule ssl_module modules/mod_ssl.so +#LoadModule optional_hook_export_module modules/mod_optional_hook_export.so +#LoadModule optional_hook_import_module modules/mod_optional_hook_import.so +#LoadModule optional_fn_import_module modules/mod_optional_fn_import.so +#LoadModule optional_fn_export_module modules/mod_optional_fn_export.so +#LoadModule dialup_module modules/mod_dialup.so +#LoadModule http2_module modules/mod_http2.so +#LoadModule proxy_http2_module modules/mod_proxy_http2.so +#LoadModule md_module modules/mod_md.so +#LoadModule lbmethod_byrequests_module modules/mod_lbmethod_byrequests.so +#LoadModule lbmethod_bytraffic_module modules/mod_lbmethod_bytraffic.so +#LoadModule lbmethod_bybusyness_module modules/mod_lbmethod_bybusyness.so +#LoadModule lbmethod_heartbeat_module modules/mod_lbmethod_heartbeat.so +LoadModule unixd_module modules/mod_unixd.so +#LoadModule heartbeat_module modules/mod_heartbeat.so +#LoadModule heartmonitor_module modules/mod_heartmonitor.so +#LoadModule dav_module modules/mod_dav.so +LoadModule status_module modules/mod_status.so +LoadModule autoindex_module modules/mod_autoindex.so +#LoadModule asis_module modules/mod_asis.so +#LoadModule info_module modules/mod_info.so +#LoadModule suexec_module modules/mod_suexec.so + + #LoadModule cgid_module modules/mod_cgid.so + + + #LoadModule cgi_module modules/mod_cgi.so + +#LoadModule dav_fs_module modules/mod_dav_fs.so +#LoadModule dav_lock_module modules/mod_dav_lock.so +#LoadModule vhost_alias_module modules/mod_vhost_alias.so +#LoadModule negotiation_module modules/mod_negotiation.so +LoadModule dir_module modules/mod_dir.so +#LoadModule imagemap_module modules/mod_imagemap.so +#LoadModule actions_module modules/mod_actions.so +#LoadModule 
speling_module modules/mod_speling.so +#LoadModule userdir_module modules/mod_userdir.so +LoadModule alias_module modules/mod_alias.so +#LoadModule rewrite_module modules/mod_rewrite.so + + +# +# If you wish httpd to run as a different user or group, you must run +# httpd as root initially and it will switch. +# +# User/Group: The name (or #number) of the user/group to run httpd as. +# It is usually good practice to create a dedicated user and group for +# running httpd, as with most system services. +# +User www-data +Group www-data + + + +# 'Main' server configuration +# +# The directives in this section set up the values used by the 'main' +# server, which responds to any requests that aren't handled by a +# definition. These values also provide defaults for +# any containers you may define later in the file. +# +# All of these directives may appear inside containers, +# in which case these default settings will be overridden for the +# virtual host being defined. +# + +# +# ServerAdmin: Your address, where problems with the server should be +# e-mailed. This address appears on some server-generated pages, such +# as error documents. e.g. admin@your-domain.com +# +ServerAdmin you@example.com + +# +# ServerName gives the name and port that the server uses to identify itself. +# This can often be determined automatically, but we recommend you specify +# it explicitly to prevent problems during startup. +# +# If your host doesn't have a registered DNS name, enter its IP address here. +# +#ServerName www.example.com:80 + +# +# Deny access to the entirety of your server's filesystem. You must +# explicitly permit access to web content directories in other +# blocks below. +# + + AllowOverride none + Require all denied + + +# +# Note that from this point forward you must specifically allow +# particular features to be enabled - so if something's not working as +# you might expect, make sure that you have specifically enabled it +# below. +# + +# +# DocumentRoot: The directory out of which you will serve your +# documents. By default, all requests are taken from this directory, but +# symbolic links and aliases may be used to point to other locations. +# +DocumentRoot "/usr/local/apache2/htdocs-ssl" + + # + # Possible values for the Options directive are "None", "All", + # or any combination of: + # Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews + # + # Note that "MultiViews" must be named *explicitly* --- "Options All" + # doesn't give it to you. + # + # The Options directive is both complicated and important. Please see + # http://httpd.apache.org/docs/2.4/mod/core.html#options + # for more information. + # + Options Indexes FollowSymLinks + + # + # AllowOverride controls what directives may be placed in .htaccess files. + # It can be "All", "None", or any combination of the keywords: + # AllowOverride FileInfo AuthConfig Limit + # + AllowOverride None + + # + # Controls who can get stuff from this server. + # + Require all granted + + +# +# DirectoryIndex: sets the file that Apache will serve if a directory +# is requested. +# + + DirectoryIndex index.html + + +# +# The following lines prevent .htaccess and .htpasswd files from being +# viewed by Web clients. +# + + Require all denied + + +# +# ErrorLog: The location of the error log file. +# If you do not specify an ErrorLog directive within a +# container, error messages relating to that virtual host will be +# logged here. 
If you *do* define an error logfile for a +# container, that host's errors will be logged there and not here. +# +ErrorLog /proc/self/fd/2 + +# +# LogLevel: Control the number of messages logged to the error_log. +# Possible values include: debug, info, notice, warn, error, crit, +# alert, emerg. +# +LogLevel warn + + + # + # The following directives define some format nicknames for use with + # a CustomLog directive (see below). + # + LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined + LogFormat "%h %l %u %t \"%r\" %>s %b" common + + + # You need to enable mod_logio.c to use %I and %O + LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio + + + # + # The location and format of the access logfile (Common Logfile Format). + # If you do not define any access logfiles within a + # container, they will be logged here. Contrariwise, if you *do* + # define per- access logfiles, transactions will be + # logged therein and *not* in this file. + # + CustomLog /proc/self/fd/1 common + + # + # If you prefer a logfile with access, agent, and referer information + # (Combined Logfile Format) you can use the following directive. + # + #CustomLog "logs/access_log" combined + + + + # + # Redirect: Allows you to tell clients about documents that used to + # exist in your server's namespace, but do not anymore. The client + # will make a new request for the document at its new location. + # Example: + # Redirect permanent /foo http://www.example.com/bar + + # + # Alias: Maps web paths into filesystem paths and is used to + # access content that does not live under the DocumentRoot. + # Example: + # Alias /webpath /full/filesystem/path + # + # If you include a trailing / on /webpath then the server will + # require it to be present in the URL. You will also likely + # need to provide a section to allow access to + # the filesystem path. + + # + # ScriptAlias: This controls which directories contain server scripts. + # ScriptAliases are essentially the same as Aliases, except that + # documents in the target directory are treated as applications and + # run by the server when requested rather than as documents sent to the + # client. The same rules about trailing "/" apply to ScriptAlias + # directives as to Alias. + # + ScriptAlias /cgi-bin/ "/usr/local/apache2/cgi-bin/" + + + + + # + # ScriptSock: On threaded servers, designate the path to the UNIX + # socket used to communicate with the CGI daemon of mod_cgid. + # + #Scriptsock cgisock + + +# +# "/usr/local/apache2/cgi-bin" should be changed to whatever your ScriptAliased +# CGI directory exists, if you have that configured. +# + + AllowOverride None + Options None + Require all granted + + + + # + # Avoid passing HTTP_PROXY environment to CGI's on this or any proxied + # backend servers which have lingering "httpoxy" defects. + # 'Proxy' request header is undefined by the IETF, not listed by IANA + # + RequestHeader unset Proxy early + + + + # + # TypesConfig points to the file containing the list of mappings from + # filename extension to MIME-type. + # + TypesConfig conf/mime.types + + # + # AddType allows you to add to or override the MIME configuration + # file specified in TypesConfig for specific file types. + # + #AddType application/x-gzip .tgz + # + # AddEncoding allows you to have certain browsers uncompress + # information on the fly. Note: Not all browsers support this. 
+ # + #AddEncoding x-compress .Z + #AddEncoding x-gzip .gz .tgz + # + # If the AddEncoding directives above are commented-out, then you + # probably should define those extensions to indicate media types: + # + AddType application/x-compress .Z + AddType application/x-gzip .gz .tgz + + # + # AddHandler allows you to map certain file extensions to "handlers": + # actions unrelated to filetype. These can be either built into the server + # or added with the Action directive (see below) + # + # To use CGI scripts outside of ScriptAliased directories: + # (You will also need to add "ExecCGI" to the "Options" directive.) + # + #AddHandler cgi-script .cgi + + # For type maps (negotiated resources): + #AddHandler type-map var + + # + # Filters allow you to process content before it is sent to the client. + # + # To parse .shtml files for server-side includes (SSI): + # (You will also need to add "Includes" to the "Options" directive.) + # + #AddType text/html .shtml + #AddOutputFilter INCLUDES .shtml + + +# +# The mod_mime_magic module allows the server to use various hints from the +# contents of the file itself to determine its type. The MIMEMagicFile +# directive tells the module where the hint definitions are located. +# +#MIMEMagicFile conf/magic + +# +# Customizable error responses come in three flavors: +# 1) plain text 2) local redirects 3) external redirects +# +# Some examples: +#ErrorDocument 500 "The server made a boo boo." +#ErrorDocument 404 /missing.html +#ErrorDocument 404 "/cgi-bin/missing_handler.pl" +#ErrorDocument 402 http://www.example.com/subscription_info.html +# + +# +# MaxRanges: Maximum number of Ranges in a request before +# returning the entire resource, or one of the special +# values 'default', 'none' or 'unlimited'. +# Default setting is to accept 200 Ranges. +#MaxRanges unlimited + +# +# EnableMMAP and EnableSendfile: On systems that support it, +# memory-mapping or the sendfile syscall may be used to deliver +# files. This usually improves server performance, but must +# be turned off when serving from networked-mounted +# filesystems or if support for these functions is otherwise +# broken on your system. +# Defaults: EnableMMAP On, EnableSendfile Off +# +#EnableMMAP off +#EnableSendfile on + +# Supplemental configuration +# +# The configuration files in the conf/extra/ directory can be +# included to add extra features or to modify the default configuration of +# the server, or you may simply copy their contents here and change as +# necessary. 
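Every Include below is spliced into this configuration at parse time, so a typo in any included file prevents the whole server from starting. A quick way to vet a change before it bites, sketched under the assumption that the config runs in a container built from the official httpd image; the container name apache-proxy is illustrative, not something this repo defines:

    #!/bin/bash
    # Parse the entire configuration, every active Include included,
    # without serving; exits non-zero on any syntax error.
    docker exec apache-proxy httpd -t

    # Only after a clean check, restart the workers gracefully so
    # in-flight requests are not dropped.
    docker exec apache-proxy apachectl -k graceful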
+ +# Server-pool management (MPM specific) +#Include conf/extra/httpd-mpm.conf + +# Multi-language error messages +#Include conf/extra/httpd-multilang-errordoc.conf + +# Fancy directory listings +#Include conf/extra/httpd-autoindex.conf + +# Language settings +#Include conf/extra/httpd-languages.conf + +# User home directories +#Include conf/extra/httpd-userdir.conf + +# Real-time info on requests and configuration +#Include conf/extra/httpd-info.conf + +# Virtual hosts +#Include conf/extra/httpd-vhosts.conf + +# Local access to the Apache HTTP Server Manual +#Include conf/extra/httpd-manual.conf + +# Distributed authoring and versioning (WebDAV) +#Include conf/extra/httpd-dav.conf + +# Various default settings +#Include conf/extra/httpd-default.conf + +# Configure mod_proxy_html to understand HTML4/XHTML1 + +Include conf/extra/proxy-html.conf + + +# Secure (SSL/TLS) connections +Include conf/extra/httpd-ssl.conf +# +# Note: The following must must be present to support +# starting without SSL on platforms with no /dev/random equivalent +# but a statically compiled-in mod_ssl. +# + +SSLRandomSeed startup builtin +SSLRandomSeed connect builtin + + diff --git a/distros/dataverse.no/configs/schema.xml b/distros/dataverse.no/configs/schema.xml new file mode 100644 index 0000000..1718962 --- /dev/null +++ b/distros/dataverse.no/configs/schema.xml @@ -0,0 +1,1546 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml new file mode 100755 index 0000000..a625eba --- /dev/null +++ b/distros/dataverse.no/docker-compose.yaml @@ -0,0 +1,282 @@ +version: '3.7' +# Settings and configurations that are common for all containers + +services: + reverse-proxy: + # The official v2 Traefik docker image + image: traefik:v2.2 + # Enables the web UI and tells Traefik to listen to docker + container_name: traefik + command: + - "--api.insecure=true" + - "--providers.docker=true" + - "--providers.docker.exposedbydefault=false" + - --providers.docker.network=traefik + - "--entrypoints.web.address=:80" + - "--entrypoints.websecure.address=:443" + - "--entrypoints.web.http.redirections.entryPoint.to=websecure" + - "--entrypoints.web.http.redirections.entryPoint.scheme=https" + - "--entrypoints.web.http.redirections.entryPoint.priority=10" # disable permanent forwarding for every route + - --providers.file.filename=/var/traefik2/certs/certificates.toml + - --providers.file.watch=true + hostname: ${hostname} + networks: + - traefik + ports: + - 80:80 + - 9443:443 + volumes: + - /etc/localtime:/etc/localtime:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + - "${CONFIGURATION_PATH}/configuration/files:/var/traefik2/certs" + - "${CONFIGURATION_PATH}/configuration/:/configuration/" + + postgres: + networks: + - traefik + container_name: postgres + ports: + - "5433:5432" + image: postgres:10.13 + restart: unless-stopped + + environment: + - "LC_ALL=C.UTF-8" + - "POSTGRES_DB" + - "POSTGRES_USER" + - "POSTGRES_PASSWORD" + - "POSTGRES_PORT" + volumes: + - ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down + # - /mntblob/database-data-prod:/var/lib/postgresql/data/ + # - /extdisk/database-data-demo:/var/lib/postgresql/data/ + + + shibboleth: + networks: + - traefik + image: ${DOCKER_HUB}/shibboleth:3.1.0 + container_name: shibboleth + privileged: true + ports: + - "8089:80" + - "443:9443" + volumes: + - ${CONFIGURATION_PATH}/shibboleth:/etc/shibboleth + - ./configs/http-ssl.conf:/etc/httpd/conf.d/ssl.conf + - ${CONFIGURATION_PATH}/configuration/files/${hostname}.pem:/etc/pki/tls/certs/localhost.crt + - ${CONFIGURATION_PATH}/configuration/files/${hostname}.key:/etc/pki/tls/private/localhost.key + hostname: ${hostname} + labels: + - "traefik.enable=true" + - traefik.http.routers.shibboleth-web.rule=Host(`${traefikhost}`) && PathPrefix(`/Shibboleth.sso`) + - traefik.http.routers.shibboleth-web-secure.rule=Host(`${traefikhost}`) && PathPrefix(`/Shibboleth.sso`) + - traefik.http.routers.shibboleth-web-secure.tls=true + - traefik.http.routers.shibboleth-web-secure.entrypoints=websecure + - "traefik.http.services.shibboleth-web-secure.loadbalancer.server.port=9443" + + solr: + networks: + - traefik + image: ${DOCKER_HUB}/solr:8.9.0 + container_name: solr + privileged: true + ports: + - "8984:8983" + environment: + - "SOLR_HOST=solr" + - "SOLR_PORT=8983" + - "SOLR_JAVA_MEM=-Xms4g -Xmx4g" + - "SOLR_OPTS=-Dlog4j2.formatMsgNoLookups=true" + volumes: + - solr-data:/opt/solr/server/solr/collection1/data + - ./configs/schema.xml:/var/solr/data/collection1/conf/schema.xml + labels: + - "traefik.enable=true" + - 
"traefik.http.routers.solr.rule=Host(`solr.${traefikhost}`)" + - "traefik.http.services.solr.loadbalancer.server.port=8983" + - "traefik.http.routers.solr.tls=true" + - "traefik.http.routers.solr.tls.certresolver=myresolver" + + phppgadmin: + networks: + - traefik + image: marvambass/phppgadmin + container_name: phppgadmin + restart: always + environment: + - DISABLE_TLS=disable + - DB_HOST=postgres + ports: + - 8095:80 + + minio: + # Please use fixed versions :D + image: minio/minio:RELEASE.2022-03-22T02-05-10Z + container_name: minio + user: root + networks: + - traefik + volumes: + - ${CONFIGURATION_PATH}/secrets/minio:/run/secrets + - ${LOCAL_STORAGE}:/data + - /mnt:/mnt + #- /mnt/minio-data:/data/.minio.sys/tmp + ports: + - "9000:9000" + - "9017:9017" + command: + - server + - /data + - --console-address + - ":9017" + environment: + - "MINIO_ROOT_USER" + - "MINIO_ROOT_PASSWORD" + - "MINIO_BROWSER=off" + #- "MINIO_ACCESS_KEY_FILE=/run/secrets/minio_access_key" + #- "MINIO_SECRET_KEY_FILE=/run/secrets/minio_secret_key" + # Do NOT use MINIO_DOMAIN or MINIO_SERVER_URL with Traefik. + # All Routing is done by Traefik, just tell minio where to redirect to. + - MINIO_BROWSER_REDIRECT_URL=http://stash.localhost + labels: + - traefik.enable=true + - traefik.http.routers.minio.service=minio + - "traefik.http.routers.minio.rule=Host(`${traefikhost}`) && PathPrefix(`/storage`)" + - "traefik.http.services.minio.loadbalancer.server.port=9016" + #- traefik.http.routers.minio.rule=Host(`minio.${traefikhost}`) + #- traefik.http.services.minio.loadbalancer.server.port=9000 + - traefik.http.routers.minio-console.service=minio-console + #- traefik.http.routers.minio-console.rule=Host(`minio-stash.${traefikhost}`) + - "traefik.http.routers.minio-console.rule=Host(`${traefikhost}`) && PathPrefix(`/console`)" + - "traefik.http.services.minio-console.loadbalancer.server.port=9017" + - "traefik.http.routers.minio.tls=true" + - "traefik.http.routers.minio.tls.certresolver=myresolver" + - "traefik.http.routers.minio-console.tls=true" + - "traefik.http.routers.minio-console.tls.certresolver=myresolver" + + whoami: + networks: + - traefik + image: "containous/whoami" + container_name: "whoami" + labels: + - "traefik.enable=true" + # - "traefik.http.routers.whoami.entrypoints=web" + - "traefik.http.routers.whoami.rule=Host(`${traefikhost}`) && PathPrefix(`/whoami`)" + - "traefik.http.routers.whoami.tls=true" + - "traefik.http.routers.whoami.tls.certresolver=myresolver" + + dataverse: + networks: + - traefik + image: coronawhy/dataverse:${VERSION} + container_name: dataverse + privileged: true + user: + "root" + ports: + #- "443:443" + - "4849:4848" + - "8088:8088" + - "8080:8080" + - "8099:8009" + extra_hosts: + - "${traefikhost}:51.105.181.173" + environment: + - "CVM_SERVER_NAME=CESSDA" #Optional + - "CESSDA" + - "CLARIN" + - "doi_authority" + - "doi_provider" + - "doi_username" + - "doi_password" + - "dataciterestapiurlstring" + - "baseurlstring" + - "aws_bucket_name" + - "aws_s3_profile" + - "aws_endpoint_url" + - "aws_uit_bucket_name" + - "aws_uit_s3_profile" + - "system_email" + - "mailhost" + - "mailuser" + - "no_reply_email" + - "smtp_password" + - "smtp_port" + - "socket_port" + - "federated_json_file" + - "bucketname_1" + - "minio_label_1" + - "minio_bucket_1" + - "minio_custom_endpoint" + - "bucketname_2" + - "minio_profile_1" + - "minio_label_2" + - "minio_bucket_2" + - "minio_profile_2" + - "DATAVERSE_DB_HOST" + - "DATAVERSE_DB_USER" + - "DATAVERSE_DB_PASSWORD" + - "DATAVERSE_DB_NAME" + - 
"DATAVERSE_SERVICE_HOST" + - "DATAVERSE_URL" + - "SOLR_SERVICE_HOST" + - "SOLR_SERVICE_PORT" + - "CVM_SERVER_URL=https://ns.${traefikhost}" + - "CVM_TSV_SOURCE=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cvocdemo.tsv" + - "1WAR_FILE=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/dataverse-5.3-cv.war" + - "WAR_FILE=https://github.com/IQSS/dataverse/releases/download/v5.3/dataverse-5.3.war" + - "CVM_SQL=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cv-update.sql" + - "CVM_CONFIG=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cvoc-conf.json" + - "LANG=en" + - "MAINLANG" + - "cvManager=http://" + - "BUNDLEPROPERTIES=Bundle.properties" + - "ADMIN_EMAIL=admin@localhost" + - "MAIL_SERVER=mailrelay" + - "SOLR_LOCATION=solr:8983" + - "INIT_SCRIPTS_FOLDER" + - "hostname" + - "POSTGRES_SERVER" + - "POSTGRES_PORT" + - "POSTGRES_DATABASE" + - "POSTGRES_USER" + - "POSTGRES_PASSWORD" + - "PGPASSWORD" + - "TWORAVENS_LOCATION=NOT INSTALLED" + - "RSERVE_HOST=localhost" + - "RSERVE_PORT=6311" + - "RSERVE_USER=rserve" + - "RSERVE_PASSWORD=rserve" + - "JVM_OPTS='-Xmx4g -Xms4g -XX:MaxPermSize=4g -XX:PermSize=4g'" + depends_on: + - postgres + - solr + volumes: + - ${CONFIGURATION_PATH}/secrets:/secrets + - ${LOCAL_STORAGE}/data:/data + - ${DOCROOT}/docroot:/opt/docroot + - ./configs/domain.xml:/opt/payara/domain.xml + - ./init.d:/opt/payara/init.d + - /mnt:/mnt + labels: + - "traefik.enable=true" + - "traefik.http.routers.dataverse.rule=Host(`${traefikhost}`)" + - "traefik.http.services.dataverse.loadbalancer.server.port=8080" + - "traefik.http.routers.dataverse.tls=true" + - "traefik.http.routers.dataverse.tls.certresolver=myresolver" +volumes: + solr-data: + data1-1: + data1-2: + data2-1: + data2-2: + data3-1: + data3-2: + data4-1: + data4-2: + +networks: + traefik: + external: true diff --git a/distros/dataverse.no/init.d/006-s3-aws-storage.sh b/distros/dataverse.no/init.d/006-s3-aws-storage.sh new file mode 100755 index 0000000..de69d4d --- /dev/null +++ b/distros/dataverse.no/init.d/006-s3-aws-storage.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +# AWS Bucket for Dataverse +# https://guides.dataverse.org/en/latest/installation/config.html#id90 +if [ "${aws_bucket_name}" ]; then + cp -R /secrets/aws-cli/.aws /root/.aws + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.type\=s3" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.label\=S3" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.bucket-name\=${aws_bucket_name}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.download-redirect\=true" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.upload-redirect=true" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.min-part-size=53687091200" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.url-expiration-minutes\=120" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.connection-pool-size\=4096" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.storage-driver-id\=S3" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options 
"-Ddataverse.files.S3.profile\=${aws_s3_profile}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.custom-endpoint-url\=${aws_endpoint_url}" + curl -X PUT "http://localhost:8080/api/admin/settings/:DownloadMethods" -d "native/http" +fi diff --git a/distros/dataverse.no/init.d/007-s3-aws-storage.sh b/distros/dataverse.no/init.d/007-s3-aws-storage.sh new file mode 100755 index 0000000..3b0eba0 --- /dev/null +++ b/distros/dataverse.no/init.d/007-s3-aws-storage.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +# AWS Bucket for Dataverse +# https://guides.dataverse.org/en/latest/installation/config.html#id90 +if [ "${aws_uit_bucket_name}" ]; then + cp -R /secrets/aws-cli/.aws /root/.aws + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.type\=s3" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.label\=S3uit" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.bucket-name\=${aws_uit_bucket_name}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.download-redirect\=true" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.upload-redirect=true" + # asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.min-part-size=53687091200" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.url-expiration-minutes\=120" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.connection-pool-size\=4096" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.storage-driver-id\=S3uit" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.profile\=${aws_uit_s3_profile}" + # asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.custom-endpoint-url\=${aws_endpoint_url}" + curl -X PUT "http://localhost:8080/api/admin/settings/:DownloadMethods" -d "native/http" +fi diff --git a/distros/dataverse.no/init.d/01-persistent-id.sh b/distros/dataverse.no/init.d/01-persistent-id.sh new file mode 100755 index 0000000..98581e7 --- /dev/null +++ b/distros/dataverse.no/init.d/01-persistent-id.sh @@ -0,0 +1,22 @@ +#!/bin/bash +echo "Setting up the settings" >> /tmp/status.log +echo "- Allow internal signup" >> /tmp/status.log +SERVER=http://${DATAVERSE_URL}/api +echo $SERVER +curl -X PUT -d yes "$SERVER/admin/settings/:AllowSignUp" +curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:SignUpUrl" +curl -X PUT -d CV "$SERVER/admin/settings/:CV" +curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY +curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy +curl -X PUT -d 'native/http' $SERVER/admin/settings/:UploadMethods +curl -X PUT -d solr:8983 "$SERVER/admin/settings/:SolrHostColonPort" +echo + +# Demo server with FAKE DOIs if doi_authority is empty +if [ -z "${doi_authority}" ]; then + curl -X PUT -d doi "$SERVER/admin/settings/:Protocol" + curl -X PUT -d 10.5072 "$SERVER/admin/settings/:Authority" + curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder" + curl -X PUT -d FAKE "$SERVER/admin/settings/:DoiProvider" +fi + diff --git 
a/distros/dataverse.no/init.d/010-mailrelay-set.sh b/distros/dataverse.no/init.d/010-mailrelay-set.sh new file mode 100755 index 0000000..4e6ddb9 --- /dev/null +++ b/distros/dataverse.no/init.d/010-mailrelay-set.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +# Setup mail relay +# https://guides.dataverse.org/en/latest/developers/troubleshooting.html +if [ "${system_email}" ]; then + curl -X PUT -d ${system_email} http://localhost:8080/api/admin/settings/:SystemEmail + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} delete-javamail-resource mail/notifyMailSession + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-javamail-resource --mailhost ${mailhost} --mailuser ${mailuser} --fromaddress ${no_reply_email} --property mail.smtp.auth=false:mail.smtp.password=${smtp_password}:mail.smtp.port=${smtp_port}:mail.smtp.socketFactory.port=${socket_port}:mail.smtp.socketFactory.fallback=false mail/notifyMailSession +fi diff --git a/distros/dataverse.no/init.d/011-local-storage.sh b/distros/dataverse.no/init.d/011-local-storage.sh new file mode 100755 index 0000000..59c2602 --- /dev/null +++ b/distros/dataverse.no/init.d/011-local-storage.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +# Enable file folder in local storage +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.type\=file" +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.label\=file" +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.directory\=/data" diff --git a/distros/dataverse.no/init.d/012-minio-bucket1.sh b/distros/dataverse.no/init.d/012-minio-bucket1.sh new file mode 100755 index 0000000..6fe5e7b --- /dev/null +++ b/distros/dataverse.no/init.d/012-minio-bucket1.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +# MinIO bucket activation +# https://guides.dataverse.org/en/latest/installation/config.html#id87 +if [ "${minio_label_1}" ]; then + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.type\=s3" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.label\=${minio_label_1}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.bucket-name\=${minio_bucket_1}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.download-redirect\=false" +# asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.upload-redirect=true" +# asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.ingestsizelimit=13107200" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.url-expiration-minutes\=120" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.connection-pool-size\=4096" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.profile\=${minio_profile_1}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.custom-endpoint-url\=${minio_custom_endpoint}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options 
"-Ddataverse.files.${bucketname_1}.path-style-access\=true" +fi diff --git a/distros/dataverse.no/init.d/021-jhove-set-link.sh b/distros/dataverse.no/init.d/021-jhove-set-link.sh new file mode 100755 index 0000000..6801cd0 --- /dev/null +++ b/distros/dataverse.no/init.d/021-jhove-set-link.sh @@ -0,0 +1,3 @@ +ln -s /opt/payara/dvinstall/jhove.conf /opt/payara/appserver/glassfish/domains/domain1/config/jhove.conf +ln -s /opt/payara/dvinstall/jhoveConfig.xsd /opt/payara/appserver/glassfish/domains/domain1/config/jhoveConfig.xsd + diff --git a/distros/dataverse.no/init.d/022-splitpath.sh b/distros/dataverse.no/init.d/022-splitpath.sh new file mode 100755 index 0000000..740bef3 --- /dev/null +++ b/distros/dataverse.no/init.d/022-splitpath.sh @@ -0,0 +1,3 @@ +#!/bin/bash + + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.min-part-size=6553600" diff --git a/distros/dataverse.no/init.d/03-doi-set.sh b/distros/dataverse.no/init.d/03-doi-set.sh new file mode 100755 index 0000000..29d3781 --- /dev/null +++ b/distros/dataverse.no/init.d/03-doi-set.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +# Setup DOI parameters +# https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring +if [ "${doi_authority}" ]; then + curl -X PUT -d ${doi_authority} http://localhost:8080/api/admin/settings/:Authority + curl -X PUT -d ${doi_provider} http://localhost:8080/api/admin/settings/:DoiProvider + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.username\=${doi_username}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.password\=${doi_password}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.dataciterestapiurlstring\=${dataciterestapiurlstring}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.baseurlstring\=${baseurlstring}" + if [ "${doi_shoulder}" ]; then + curl -X PUT -d "${doi_shoulder}/" "$SERVER/admin/settings/:Shoulder" + fi +fi diff --git a/distros/dataverse.no/init.d/04-setdomain.sh b/distros/dataverse.no/init.d/04-setdomain.sh new file mode 100755 index 0000000..065e83a --- /dev/null +++ b/distros/dataverse.no/init.d/04-setdomain.sh @@ -0,0 +1,10 @@ +#!/bin/bash +# Setup domain name +hostname=${hostname} +echo $hostname +hostnamecmd=dataverse.fqdn=${hostname} +echo $hostnamecmd +siteURLcmd=dataverse.siteUrl='https\:\/\/'${hostname} +echo $siteURLcmd +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $siteURLcmd +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $hostnamecmd diff --git a/distros/dataverse.no/init.d/07-previewers.sh b/distros/dataverse.no/init.d/07-previewers.sh new file mode 100755 index 0000000..d71bb86 --- /dev/null +++ b/distros/dataverse.no/init.d/07-previewers.sh @@ -0,0 +1,38 @@ +#!/bin/bash + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Text\", \"description\":\"Read the text file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, 
\"contentType\":\"text/plain\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Html\", \"description\":\"View the html file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/HtmlPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/html\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mp3\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mpeg\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/wav\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/ogg\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", 
\"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/gif\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/jpeg\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/png\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Document\", \"description\":\"Read a pdf document.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/PDFPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/pdf\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/mp4\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, 
{\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/ogg\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/quicktime\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/comma-separated-values\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/tab-separated-values\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Stata File\", \"description\":\"View the Stata file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-stata-syntax\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View R file\", \"description\":\"View the R file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"type/x-r-syntax\" }" + + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Annotations\", \"description\":\"View the 
annotation entries in a file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/HypothesisPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-json-hypothesis\" }" + diff --git a/distros/dataverse.no/init.d/08-federated-login.sh b/distros/dataverse.no/init.d/08-federated-login.sh new file mode 100755 index 0000000..31208cd --- /dev/null +++ b/distros/dataverse.no/init.d/08-federated-login.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +# Federated login activation +# https://guides.dataverse.org/en/latest/installation/shibboleth.html +if [ "${federated_json_file}" ]; then + curl -X POST -H 'Content-type: application/json' --upload-file ${federated_json_file} http://localhost:8080/api/admin/authenticationProviders +fi diff --git a/distros/dataverse.no/init.d/105-reindex.sh b/distros/dataverse.no/init.d/105-reindex.sh new file mode 100755 index 0000000..465d5b0 --- /dev/null +++ b/distros/dataverse.no/init.d/105-reindex.sh @@ -0,0 +1,3 @@ +# Reindex all datasets +curl http://localhost:8080/api/admin/index/clear +curl http://localhost:8080/api/admin/index diff --git a/distros/dataverse.no/init.d/cors.json b/distros/dataverse.no/init.d/cors.json new file mode 100644 index 0000000..3bd9c03 --- /dev/null +++ b/distros/dataverse.no/init.d/cors.json @@ -0,0 +1,10 @@ +{ + "CORSRules": [ + { + "AllowedOrigins": ["*"], + "AllowedHeaders": ["*"], + "AllowedMethods": ["PUT", "GET"], + "ExposeHeaders": ["ETag"] + } + ] +} From bbfbb99db872a7ab92042a096ec9b0374e6ff17a Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 20 May 2022 08:29:35 +0000 Subject: [PATCH 002/354] fixed nb of parts on file upload --- distros/dataverse.no/init.d/006-s3-aws-storage.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distros/dataverse.no/init.d/006-s3-aws-storage.sh b/distros/dataverse.no/init.d/006-s3-aws-storage.sh index de69d4d..02e7e6b 100755 --- a/distros/dataverse.no/init.d/006-s3-aws-storage.sh +++ b/distros/dataverse.no/init.d/006-s3-aws-storage.sh @@ -9,7 +9,7 @@ if [ "${aws_bucket_name}" ]; then asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.bucket-name\=${aws_bucket_name}" asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.download-redirect\=true" asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.upload-redirect=true" - asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.min-part-size=53687091200" + # asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.min-part-size=53687091200" asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.url-expiration-minutes\=120" asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.connection-pool-size\=4096" asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.storage-driver-id\=S3" From b16848498d6542c8576960bf636a9f6f6e7cc65e Mon Sep 17 00:00:00 2001 From: Louis-wr 
From b16848498d6542c8576960bf636a9f6f6e7cc65e Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 24 May 2022 08:26:53 +0000 Subject: [PATCH 003/354] added affiliation support and .env to gitignore --- .../init.d/affiliations/affiliation2data.py | 21 ++++ .../init.d/affiliations/affiliations.csv | 108 ++++++++++++++++++ .../init.d/affiliations/affiliations.sql | 107 +++++++++++++++++ .../init.d/affiliations/extratrigger.sql | 30 +++++ 4 files changed, 266 insertions(+) create mode 100644 distros/dataverse.no/init.d/affiliations/affiliation2data.py create mode 100644 distros/dataverse.no/init.d/affiliations/affiliations.csv create mode 100644 distros/dataverse.no/init.d/affiliations/affiliations.sql create mode 100644 distros/dataverse.no/init.d/affiliations/extratrigger.sql
diff --git a/distros/dataverse.no/init.d/affiliations/affiliation2data.py b/distros/dataverse.no/init.d/affiliations/affiliation2data.py new file mode 100644 index 0000000..31e4f4d --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/affiliation2data.py @@ -0,0 +1,21 @@ +import pandas as pd + +# id | dvno_affiliation | dvno_group_name | dvno_email_level +#-----+-------------------------------------+------------------+------------------ +# 139 | UiT The Arctic University of Norway | uit.no | 2 +# 27 | Ostfold University College | hiof.no | 2 +# 4 | Akvaplan-niva | akvaplan.niva.no | 3 + +file = '/distrib/private/affiliations.csv' +#print(pd.read_csv(open(file, errors='replace'))) +affiliations = pd.read_csv(file) +for i in affiliations.index: + #print(affiliations.iloc[[i]]['dvno_group_name']) + #print("%s %s" % (affiliations.iloc[[i]]['dvno_group_name'].astype(str), affiliations.iloc[[i]]['dvno_affiliation'].astype(str))) + #print(str(affiliations.iloc[[i]]['id'].values[0])) + #print(str(affiliations.iloc[[i]]['dvno_group_name'].values[0])) + #print(str(affiliations.iloc[[i]]['dvno_affiliation'].values[0])) + dvno_email_level = len(str(affiliations.iloc[[i]]['dvno_group_name']).split('.')) + #print(subdomains) + sql = "insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('%s', '%s', '%s', '%s');" % (affiliations.iloc[[i]]['id'].values[0], affiliations.iloc[[i]]['dvno_affiliation'].values[0], affiliations.iloc[[i]]['dvno_group_name'].values[0], dvno_email_level) + print(sql)
diff --git a/distros/dataverse.no/init.d/affiliations/affiliations.csv b/distros/dataverse.no/init.d/affiliations/affiliations.csv new file mode 100644 index 0000000..fc8a230 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/affiliations.csv @@ -0,0 +1,108 @@ +id,dvno_group_name,dvno_group_id_explicit,dvno_group_id,dvno_membership,dvno_users,dvno_affiliation,feide_id,feide_email,feide_name,feide_nb,feide_nn,feide_en,feide_se,ror_id,ror_name,ror_acronym,comments +1,phs.no,&explicit/1-phsno,phsno,No Members,No,The Norwegian Police University College,54,phs.no,The Norwegian Police University College,Politihøgskolen,Politihøgskulen,The Norwegian Police University College,,https://ror.org/05486d596,Norwegian Police University College,PHS, +2,uia.no,&explicit/1-uiano,uiano,"24 users, 0 groups",24,University of Agder,55,uia.no,University of Agder,Universitetet i Agder,Universitetet i Agder,University of Agder,Agder Universitehta,https://ror.org/03x297z98,University of Agder,, +3,nifu.no,&explicit/1-nifuno,nifuno,No Members,No,NIFU,58,nifu.no,NIFU,NIFU,NIFU,NIFU,,,,, +4,hiof.no,&explicit/1-hiofno,hiofno,"7 users, 0 groups",7,Østfold University College,70,hiof.no,Østfold University College,Høgskolen i Østfold,Høgskulen i Østfold,Østfold
University College,Østfold Allaskuvla,https://ror.org/04gf7fp41,Østfold University College,HiØ, +5,aho.no,&explicit/1-ahono,ahono,No Members,No,The Oslo School of Architecture and Design,93,aho.no,The Oslo School of Architecture and Design,Arkitektur- og designhøgskolen i Oslo,Arkitektur- og designhøgskulen i Oslo,The Oslo School of Architecture and Design,,,,, +6,cmi.no,&explicit/1-cmino,cmino,No Members,No,Chr. Michelsen Institute,96,cmi.no,Chr. Michelsen Institute,Chr. Michelsens Institutt,Chr. Michelsens Institutt,Chr. Michelsen Institute,,https://ror.org/02w7rbf39,Christian Michelsen Institute,CMI, +7,mf.no,&explicit/1-mfno,mfno,No Members,No,"MF Norwegian School of Theology, Religion and Society",100,mf.no,"MF Norwegian School of Theology, Religion and Society",MF vitenskapelig høyskole,MF vitenskapelig høyskole,"MF Norwegian School of Theology, Religion and Society",,https://ror.org/01qafy255,"MF Norwegian School of Theology, Religion and Society",, +8,dmmh.no,&explicit/1-dmmhno,dmmhno,No Members,No,Queen Maud University College,103,dmmh.no,Queen Maud University College,Dronning Mauds Minne Høgskole,Dronning Mauds Minne Høgskule,Queen Maud University College,,https://ror.org/043zemc40,Queen Maud University College,DMMH, +9,nhh.no,&explicit/1-nhhno,nhhno,No Members,No,Norwegian School of Economics,119,nhh.no,Norwegian School of Economics,Norges Handelshøyskole,Norges Handelshøyskole,Norwegian School of Economics,,https://ror.org/04v53s997,Norwegian School of Economics,NHH, +10,nla.no,&explicit/1-nlano,nlano,No Members,No,NLA University College,123,nla.no,NLA University College,NLA Høgskolen,NLA Høgskolen,NLA University College,NLA Høgskolen,https://ror.org/05fdt2q64,NLA University College,, +11,npolar.no,&explicit/1-npolarno,npolarno,No Members,No,Norwegian Polar Institute,124,,Norwegian Polar Institute,Norsk Polarinstitutt,,Norwegian Polar Institute,,https://ror.org/03avf6522,Norwegian Polar Institute,, +12,nr.no,&explicit/1-nrno,nrno,No Members,No,Norwegian Computing Center,125,nr.no,Norsk Regnesentral,Norsk Regnesentral,,,,https://ror.org/02gm7te43,Norwegian Computing Center,NR, +13,sintef.no,&explicit/1-sintefno,sintefno,"1 user, 0 groups",1,SINTEF,131,sintef.no,SINTEF,SINTEF,SINTEF,SINTEF,,https://ror.org/01f677e56,SINTEF,, +14,samiskhs.no,&explicit/1-samiskhsno,samiskhsno,No Members,No,Sámi allaskuvla – Sámi University College,133,samas.no,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,Sámi allaskuvla – Samisk høgskule,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,https://ror.org/028ahgk39,Sámi University of Applied Sciences,, +15,uib.no,&explicit/1-uibno,uibno,"76 users, 0 groups",76,University of Bergen,137,uib.no,University of Bergen,Universitetet i Bergen,Universitetet i Bergen,University of Bergen,Bergen Universitehta,https://ror.org/03zga2b32,University of Bergen,, +16,uio.no,&explicit/1-uiono,uiono,"33 users, 0 groups",33,University of Oslo,138,uio.no,University of Oslo,Universitetet i Oslo,Universitetet i Oslo,University of Oslo,Oslo Universitehta,https://ror.org/01xtthb56,University of Oslo,UiO, +17,uit.no,&explicit/1-uitno,uitno,"341 users, 0 groups",341,UiT The Arctic University of Norway,139,uit.no,UiT The Arctic University of Norway,UiT Norges arktiske universitet,UiT Noregs arktiske universitet,UiT The Arctic University of Norway,UiT Norgga árktalaš universitehta,https://ror.org/00wge5k78,UiT The Arctic University of Norway,UiT, +18,ntnu.no,&explicit/1-ntnuno,ntnuno,"142 users, 0 groups",142,Norwegian 
University of Science and Technology,140,ntnu.no,NTNU,NTNU,,NTNU,,https://ror.org/05xg72x27,Norwegian University of Science and Technology,NTNU, +19,nina.no,&explicit/1-ninano,ninano,No Members,No,Norwegian Institute for Nature Research,144,nina.no,NINA,NINA,NINA,,,https://ror.org/04aha0598,Norwegian Institute for Nature Research,NINA, +20,ngu.no,&explicit/1-nguno,nguno,"1 user, 0 groups",1,Geological Survey of Norway,145,ngu.no,Geological Survey of Norway,Norges geologiske undersøkelse,Norges geologiske undersøkelse,Geological Survey of Norway,,,,, +21,himolde.no,&explicit/1-himoldeno,himoldeno,No Members,No,Molde University College,158,himolde.no,Molde University College,Høgskolen i Molde,Høgskulen i Molde,Molde University College,Molde Allaskuvla,https://ror.org/00kxjcd28,Molde University College,HiM, +22,nb.no,&explicit/1-nbno,nbno,No Members,No,National Library of Norway,160,nb.no,National Library of Norway,Nasjonalbiblioteket,Nasjonalbiblioteket,National Library of Norway,,,,, +23,uis.no,&explicit/1-uisno,uisno,"14 users, 0 groups",14,University of Stavanger,163,uis.no,University of Stavanger,Universitetet i Stavanger,Universitetet i Stavanger,University of Stavanger,Stavanger Universitehta,https://ror.org/02qte9q33,University of Stavanger,UiS, +24,hivolda.no,&explicit/1-hivoldano,hivoldano,No Members,No,Volda University College,165,hivolda.no,Volda University College,Høgskulen i Volda,Høgskulen i Volda,Volda University College,Volda Allaskuvla,https://ror.org/01eeqzy24,Volda University College,HVO, +25,khio.no,&explicit/1-khiono,khiono,No Members,No,Oslo National Academy of the Arts,195,khio.no,Oslo National Academy of the Arts,Kunsthøgskolen i Oslo,Kunsthøgskulen i Oslo,Oslo National Academy of the Arts,,https://ror.org/0543h9a62,Oslo National Academy of the Arts,, +26,samfunnsforskning.no,&explicit/1-samfunnsforskningno,samfunnsforskningno,No Members,No,Institute for Social Research,197,samfunnsforskning.no,Institute for Social Research,Institutt for samfunnsforskning,,Institute for Social Research,,https://ror.org/05swz5441,Institute for Social Research,IFS, +27,ldh.no,&explicit/1-ldhno,ldhno,No Members,No,Lovisenberg Diaconal University College,216,ldh.no,Lovisenberg diaconal university college,Lovisenberg diakonale høgskole,Lovisenberg diakonale høgskule,Lovisenberg diaconal university college,,https://ror.org/015rzvz05,Lovisenberg Diakonale Høgskole,LDH, +28,fhi.no,&explicit/1-fhino,fhino,No Members,No,Norwegian Institute of Public Health,310,fhi.no,Norwegian Institute of Public Health,Folkehelseinstituttet,,Norwegian Institute of Public Health,,https://ror.org/046nvst19,Norwegian Institute of Public Health,NIPH, +29,nih.no,&explicit/1-nihno,nihno,No Members,No,Norwegian School of Sport Sciences,323,nih.no,Norwegian School of Sport Sciences,Norges idrettshøgskole,Noregs idrettshøgskule,Norwegian School of Sport Sciences,,https://ror.org/045016w83,Norwegian School of Sport Sciences,NSSS, +30,bi.no,&explicit/1-bino,bino,No Members,No,BI Norwegian Business School,324,bi.no,BI Norwegian Business School,Handelshøyskolen BI,Handelshøyskolen BI,BI Norwegian Business School,,https://ror.org/03ez40v33,BI Norwegian Business School,, +31,nmh.no,&explicit/1-nmhno,nmhno,No Members,No,Norwegian Academy of Music,325,nmh.no,Norwegian Academy of Music,Norges musikkhøgskole,Noregs musikkhøgskule,Norwegian Academy of Music,,https://ror.org/052dy9793,Norwegian Academy of Music,NMH, +32,kristiania.no,&explicit/1-kristianiano,kristianiano,No Members,No,Kristiania University 
College,17007,feide.egms.no,Kristiania University College,Høyskolen Kristiania,Høyskolen Kristiania,Kristiania University College,,https://ror.org/03gss5916,Campus Kristiania,, +33,fhs.mil.no,&explicit/1-fhsmilno,fhsmilno,"1 user, 0 groups",1,Norwegian Defence University College,115267,mil.no,Norwegian Defence University College,Forsvarets høgskoler,Forsvarets høgskuler,Norwegian Defence University College,,https://ror.org/02vfz9j23,Norwegian Defence University College,NDUC, +34,ansgarskolen.no,&explicit/1-ansgarskolenno,ansgarskolenno,No Members,No,Ansgar University College,120177,ansgarhogskole.no,Ansgar University College,Ansgar høyskole,,Ansgar University College,,https://ror.org/05y8hw592,Ansgar Bibelskole,, +35,oslomet.no,&explicit/1-oslometno,oslometno,No Members,No,OsloMet – Oslo Metropolitan University,120186,oslomet.no,OsloMet – Oslo Metropolitan University,OsloMet – storbyuniversitetet,OsloMet – storbyuniversitetet,OsloMet – Oslo Metropolitan University,OsloMet – stuorragávpotuniversitehta,https://ror.org/04q12yn84,OsloMet – Oslo Metropolitan University,HiOA, +36,nmbu.no,&explicit/1-nmbuno,nmbuno,"48 users, 0 groups",48,Norwegian University of Life Sciences,1777926,nmbu.no,Norwegian University of Life Sciences,Norges miljø- og biovitenskapelige universitet,Noregs miljø- og biovitenskapelige universitet,Norwegian University of Life Sciences,,https://ror.org/04a1mvv97,Norwegian University of Life Sciences,NMBU, +37,nibio.no,&explicit/1-nibiono,nibiono,No Members,No,Norwegian Institute of Bioeconomy Research,2052113,nibio.no,Nibio,Nibio,,,,https://ror.org/04aah1z61,Norwegian Institute of Bioeconomy Research,NIBIO, +38,vid.no,&explicit/1-vidno,vidno,No Members,No,VID Specialized University,2064538,vid.no,VID Specialized University,VID vitenskapelige høgskole,VID vitenskapelige høgskule,VID Specialized University,,https://ror.org/0191b3351,VID Specialized University,VID, +39,nord.no,&explicit/1-nordno,nordno,"23 users, 0 groups",23,Nord University,2066644,nord.no,Nord University,Nord universitet,Nord universitet,Nord University,,https://ror.org/030mwrt98,Nord University,, +40,usn.no,&explicit/1-usnno,usnno,No Members,No,University of South-Eastern Norway,2066647,usn.no,University of South-Eastern Norway,Universitetet i Sørøst-Norge,Universitetet i Søraust-Noreg,University of South-Eastern Norway,,https://ror.org/05ecg5h20,University of South-Eastern Norway,USN, +41,hvl.no,&explicit/1-hvlno,hvlno,"28 users, 0 groups",28,Western Norway University of Applied Sciences,2126357,hvl.no,Western Norway University of Applied Sciences,Høgskulen på Vestlandet,Høgskulen på Vestlandet,Western Norway University of Applied Sciences,,https://ror.org/05phns765,Western Norway University of Applied Sciences,HVL, +42,nkvts.no,&explicit/1-nkvtsno,nkvtsno,No Members,No,Norwegian centre for violence and traumatic stress studies,2127917,nkvts.no,Norwegian centre for violence and traumatic stress studies,Nasjonalt kunnskapssenter om vold og traumatisk stress,,Norwegian centre for violence and traumatic stress studies,,https://ror.org/01p618c36,Norwegian Centre for Violence and Traumatic Stress Studies,NKVTS, +43,inn.no,&explicit/1-innno,innno,"26 users, 0 groups",26,Inland Norway University of Applied Sciences,2128215,inn.no,Inland Norway University of Applied Sciences,Høgskolen i Innlandet,,Inland Norway University of Applied Sciences,,https://ror.org/02dx4dc92,Inland Norway University of Applied Sciences,, +44,vetinst.no,&explicit/1-vetinstno,vetinstno,No Members,No,Norwegian Veterinary 
Institute,2217125,vetinst.no,Veterinærinstituttet,Veterinærinstituttet,,,,https://ror.org/05m6y3182,Norwegian Veterinary Institute,NVI, +45,nubu.no,&explicit/1-nubuno,nubuno,No Members,No,NUBU - The Norwegian Center for Child Behavioral Development,2217221,nubu.no,NUBU - The Norwegian Center for Child Behavioral Development,NUBU - Nasjonalt utviklingssenter for barn og unge,,NUBU - The Norwegian Center for Child Behavioral Development,,,,, +46,hlsenteret.no,&explicit/1-hlsenteretno,hlsenteretno,No Members,No,The Norwegian Center for Holocaust and Minority Studies,2217222,hlsenteret.no,The Norwegian Center for Holocaust and Minority Studies,Senter for studier av Holocaust og livssynsminoriteter,,The Norwegian Center for Holocaust and Minority Studies,,https://ror.org/03ppkyp25,Center for Studies of Holocaust and Religious Minorities,, +47,met.no,&explicit/1-metno,metno,No Members,No,Norwegian Meteorological Institute,2217341,,Meteorologisk Institutt,Meteorologisk Institutt,,,,https://ror.org/001n36p86,Norwegian Meteorological Institute,MET, +48,simula.no,&explicit/1-simulano,simulano,No Members,No,Simula Research Laboratory,2217477,simula.no,Simula,Simula,,,,https://ror.org/00vn06n10,Simula Research Laboratory,, +49,agderforskning.no,&explicit/1-agderforskningno,agderforskningno,No Members,No,Agder Research,,,,,,,,https://ror.org/02k3w5n89,Agder Research,, +50,akvaplan.niva.no,&explicit/1-akvaplannivano,akvaplannivano,No Members,No,Akvaplan-niva,,,,,,,,https://ror.org/03nrps502,Akvaplan-niva (Norway),, +51,arbark.no,&explicit/1-arbarkno,arbarkno,No Members,No,Norwegian Labour Movement Archives and Library,,,,,,,,https://ror.org/05x91m338,Norwegian Labour Movement Archives and Library,, +52,cas.oslo.no,&explicit/1-casoslono,casoslono,No Members,No,Centre for Advanced Study,,,,,,,,https://ror.org/05rbhea42,Centre for Advanced Study,CAS, +53,cicero.oslo.no,&explicit/1-cicerooslono,cicerooslono,No Members,No,CICERO Center for International Climate Research,,,,,,,,https://ror.org/01gw5dy53,CICERO Center for International Climate Research,CICERO, +54,cmr.no,&explicit/1-cmrno,cmrno,No Members,No,Christian Michelsen Research,,,,,,,,,,,Now part of NORCE Norwegian Research Centre +55,dataverse.no,&explicit/1-dataverseno,dataverseno,No Members,No,,,,,,,,,,,, +56,DataverseNO Admin,&explicit/1-DataverseNOAdmin,DataverseNOAdmin,"3 users, 0 groups",3,,,,,,,,,,,, +57,DataverseNO Curator,&explicit/1-DvNOCurator,DvNOCurator,"4 users, 0 groups",4,,,,,,,,,,,, +58,DataverseNO Dataset Creator,&explicit/1-DataverseNODatasetCreator,DataverseNODatasetCreator,"4 users, 0 groups",4,,,,,,,,,,,, +59,diakonova.no,&explicit/1-diakonovano,diakonovano,No Members,No,Diakonova,,,,,,,,,,, +60,fafo.no,&explicit/1-fafono,fafono,No Members,No,Fafo Foundation,,,,,,,,https://ror.org/00ee9xb13,Fafo Foundation,, +61,ffi.no,&explicit/1-ffino,ffino,No Members,No,Norwegian Defence Research Establishment,,,,,,,,https://ror.org/0098gnz32,Norwegian Defence Research Establishment,FFI, +62,flymed.no,&explicit/1-flymedno,flymedno,No Members,No,Flymedisinsk institutt,,,,,,,,,,, +63,fni.no,&explicit/1-fnino,fnino,No Members,No,Fridtjof Nansen Institute,,,,,,,,https://ror.org/04ep2t954,Fridtjof Nansen Institute,FNI, +64,genok.no,&explicit/1-genokno,genokno,No Members,No,GenØk – Centre for Biosafety,,,,,,,,https://ror.org/027arfy53,GenØk,, +65,hi.no,&explicit/1-hino,hino,No Members,No,Norwegian Institute of Marine Research,,,,,,,,https://ror.org/05vg74d16,Norwegian Institute of Marine Research,IMR, +66,ife.no,&explicit/1-ifeno,ifeno,No 
Members,No,Institute for Energy Technology,,,,,,,,https://ror.org/02jqtg033,Institute for Energy Technology,IFE, +67,iris.no,&explicit/1-irisno,irisno,No Members,No,International Research Institute of Stavanger,,,,,,,,https://ror.org/0502t5s28,International Research Institute of Stavanger,IRIS,Now part of NORCE Norwegian Research Centre +68,kifo.no,&explicit/1-kifono,kifono,No Members,No,"Institute for Church, Religion, and Worldview Research",,,,,,,,https://ror.org/051p4t773,"Institute for Church, Religion, and Worldview Research",KIFO, +69,kreftregisteret.no,&explicit/1-kreftregisteretno,kreftregisteretno,No Members,No,Cancer Registry of Norway,,,,,,,,https://ror.org/03sm1ej59,Cancer Registry of Norway,CRN, +70,legeforeningen.no,&explicit/1-legeforeningenno,legeforeningenno,No Members,No,,,,,,,,,,,, +71,moreforsk.no,&explicit/1-moreforskno,moreforskno,No Members,No,Møreforsking,,,,,,,,https://ror.org/02w4kss89,Møreforsking (Norway),, +72,nersc.no,&explicit/1-nerscno,nerscno,No Members,No,Nansen Environmental and Remote Sensing Center,,,,,,,,,,, +73,nfms.no,&explicit/1-nfmsno,nfmsno,No Members,No,Aeromedical Center of Norway,,,,,,,,,,, +74,nforsk.no,&explicit/1-nforskno,nforskno,No Members,No,Nordland Research Institute,,,,,,,,https://ror.org/02wvb2a30,Nordland Research Institute,, +75,ngi.no,&explicit/1-ngino,ngino,No Members,No,Norwegian Geotechnical Institute,,,,,,,,https://ror.org/032ksge37,Norwegian Geotechnical Institute,NGI, +76,niku.no,&explicit/1-nikuno,nikuno,No Members,No,Norwegian Institute for Cultural Heritage Research,,,,,,,,https://ror.org/02xhrye98,Norwegian Institute for Cultural Heritage Research,NIKU, +77,nilu.no,&explicit/1-niluno,niluno,No Members,No,Norwegian Institute for Air Research,,,,,,,,https://ror.org/00q7d9z06,Norwegian Institute for Air Research,NILU, +78,niva.no,&explicit/1-nivano,nivano,No Members,No,Norwegian Institute for Water Research,,,,,,,,https://ror.org/03hrf8236,Norwegian Institute for Water Research,NIVA, +79,nlr.no,&explicit/1-nlrno,nlrno,No Members,No,Norsk Landbruksrådgiving,,,,,,,,https://ror.org/03c1zct07,Norsk Landbruksrådgiving,NLR, +80,nobel.no,&explicit/1-nobelno,nobelno,No Members,No,Norwegian Nobel Institute,,,,,,,,https://ror.org/055wgnw59,Norwegian Nobel Institute,, +81,nofima.no,&explicit/1-nofimano,nofimano,No Members,No,Nofima,,,,,,,,https://ror.org/02v1rsx93,Nofima,, +82,norceresearch.no,&explicit/1-norceresearchno,norceresearchno,"1 user, 0 groups",1,Norwegian Research Centre,,,,,,,,https://ror.org/02gagpf75,Norwegian Research Centre,NORCE, +83,norsar.no,&explicit/1-norsarno,norsarno,No Members,No,Norwegian Seismic Array,,,,,,,,https://ror.org/02vw8cm83,Norsar,, +84,norsok.no,&explicit/1-norsokno,norsokno,No Members,No,Norsk senter for økologisk landbruk,,,,,,,,,,, +85,norsus.no,&explicit/1-norsusno,norsusno,No Members,No,Norwegian Institute for Sustainability Research,,,,,,,,,,, +86,norut.no,&explicit/1-norutno,norutno,No Members,No,,,,,,,,,,,,Now part of NORCE Norwegian Research Centre +87,nupi.no,&explicit/1-nupino,nupino,No Members,No,Norwegian Institute of International Affairs,,,,,,,,https://ror.org/01pznaa94,Norwegian Institute of International Affairs,NUPI, +88,ostfoldforskning.no,&explicit/1-ostfoldforskningno,ostfoldforskningno,No Members,No,Ostfold Research,,,,,,,,https://ror.org/01vmqaq17,Ostfold Research (Norway),,Has changed name to +89,ostforsk.no,&explicit/1-ostforskno,ostforskno,No Members,No,Eastern Norway Research Institute,,,,,,,,https://ror.org/020deqr47,Eastern Norway Research Institute,ENRI, 
+90,pfi.no,&explicit/1-pfino,pfino,No Members,No,Paper and Fibre Research Institute,,,,,,,,https://ror.org/053qb6g74,Paper and Fibre Research Institute,PFI, +91,prio.org,&explicit/1-prioorg,prioorg,No Members,No,Peace Research Institute,,,,,,,,https://ror.org/04dx54y73,Peace Research Institute,PRIO, +92,risefr.no,&explicit/1-risefrno,risefrno,No Members,No,RISE Fire Research,,,,,,,,,,, +93,ruralis.no,&explicit/1-ruralisno,ruralisno,No Members,No,Institute for Rural and Regional Research,,,,,,,,https://ror.org/0169gd037,Centre for Rural Research,CRR,Note: The ROR entry is not up to date. +94,sik.no,&explicit/1-sikno,sikno,No Members,No,Centre for Intercultural Communication,,,,,,,,,,,Now part of VID +95,snf.no,&explicit/1-snfno,snfno,No Members,No,Centre for Applied Research,,,,,,,,,,, +96,stami.no,&explicit/1-stamino,stamino,No Members,No,National Institute of Occupational Health,,,,,,,,https://ror.org/04g3t6s80,National Institute of Occupational Health,NIOH, +97,teknova.no,&explicit/1-teknovano,teknovano,No Members,No,Teknova,,,,,,,,https://ror.org/02ekw8p73,Teknova,,Now part of NORCE Norwegian Research Centre +98,tel-tek.no,&explicit/1-tel-tekno,tel-tekno,No Members,No,Tel-Tek,,,,,,,,,,,Now part of SINTEF +99,tfou.no,&explicit/1-tfouno,tfouno,No Members,No,Trøndelag Forskning og Utvikling,,,,,,,,https://ror.org/01hw8wm79,Trøndelag Forskning og Utvikling (Norway),TFOU,Now part of SINTEF +100,tisip.no,&explicit/1-tisipno,tisipno,No Members,No,TISIP,,,,,,,,,,, +101,tmforsk.no,&explicit/1-tmforskno,tmforskno,No Members,No,Telemark Research Institute,,,,,,,,https://ror.org/02jjgkb92,Telemark Research Institute,TRI, +102,toi.no,&explicit/1-toino,toino,No Members,No,Institute of Transport Economics,,,,,,,,https://ror.org/04p2pa451,Institute of Transport Economics,TØI, +103,treteknisk.no,&explicit/1-tretekniskno,tretekniskno,No Members,No,Norwegian Institute of Wood Technology,,,,,,,,https://ror.org/00d5qfz16,Norwegian Institute of Wood Technology,NTI, +104,uni.no,&explicit/1-unino,unino,No Members,No,Uni Research,,,,,,,,https://ror.org/016tr2j79,Uni Research (Norway),,Now part of NORCE Norwegian Research Centre +105,vestforsk.no,&explicit/1-vestforskno,vestforskno,No Members,No,Western Norway Research Institute,,,,,,,,https://ror.org/04z1q2j11,Vestlandsforsking,WRNI, +106,westerdals.no,&explicit/1-westerdalsno,westerdalsno,No Members,No,"Westerdals Oslo School of Arts, Communication and Technology",,,,,,,,https://ror.org/02re25503,"Westerdals Oslo School of Arts, Communication and Technology",,Now part of Kristiania + diff --git a/distros/dataverse.no/init.d/affiliations/affiliations.sql b/distros/dataverse.no/init.d/affiliations/affiliations.sql new file mode 100644 index 0000000..0f3f8a3 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/affiliations.sql @@ -0,0 +1,107 @@ +truncate table dvnoaffiliations; +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('1', 'The Norwegian Police University College', 'phs.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('2', 'University of Agder', 'uia.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('3', 'NIFU', 'nifu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('4', 'Østfold University College', 'hiof.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('5', 'The Oslo 
School of Architecture and Design', 'aho.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('6', 'Chr. Michelsen Institute', 'cmi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('7', 'MF Norwegian School of Theology, Religion and Society', 'mf.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('8', 'Queen Maud University College', 'dmmh.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('9', 'Norwegian School of Economics', 'nhh.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('10', 'NLA University College', 'nla.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('11', 'Norwegian Polar Institute', 'npolar.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('12', 'Norwegian Computing Center', 'nr.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('13', 'SINTEF', 'sintef.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('14', 'Sámi allaskuvla – Sámi University College', 'samiskhs.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('15', 'University of Bergen', 'uib.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('16', 'University of Oslo', 'uio.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('17', 'UiT The Arctic University of Norway', 'uit.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('18', 'Norwegian University of Science and Technology', 'ntnu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('19', 'Norwegian Institute for Nature Research', 'nina.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('20', 'Geological Survey of Norway', 'ngu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('21', 'Molde University College', 'himolde.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('22', 'National Library of Norway', 'nb.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('23', 'University of Stavanger', 'uis.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('24', 'Volda University College', 'hivolda.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('25', 'Oslo National Academy of the Arts', 'khio.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('26', 'Institute for Social Research', 'samfunnsforskning.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('27', 'Lovisenberg Diaconal University College', 'ldh.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('28', 'Norwegian Institute of Public Health', 
'fhi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('29', 'Norwegian School of Sport Sciences', 'nih.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('30', 'BI Norwegian Business School', 'bi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('31', 'Norwegian Academy of Music', 'nmh.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('32', 'Kristiania University College', 'kristiania.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('33', 'Norwegian Defence University College', 'fhs.mil.no', '3'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('34', 'Ansgar University College', 'ansgarskolen.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('35', 'OsloMet – Oslo Metropolitan University', 'oslomet.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('36', 'Norwegian University of Life Sciences', 'nmbu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('37', 'Norwegian Institute of Bioeconomy Research', 'nibio.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('38', 'VID Specialized University', 'vid.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('39', 'Nord University', 'nord.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('40', 'University of South-Eastern Norway', 'usn.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('41', 'Western Norway University of Applied Sciences', 'hvl.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('42', 'Norwegian centre for violence and traumatic stress studies', 'nkvts.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('43', 'Inland Norway University of Applied Sciences', 'inn.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('44', 'Norwegian Veterinary Institute', 'vetinst.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('45', 'NUBU - The Norwegian Center for Child Behavioral Development', 'nubu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('46', 'The Norwegian Center for Holocaust and Minority Studies', 'hlsenteret.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('47', 'Norwegian Meteorological Institute', 'met.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('48', 'Simula Research Laboratory', 'simula.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('49', 'Agder Research', 'agderforskning.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('50', 'Akvaplan-niva', 'akvaplan.niva.no', '3'); +insert into dvnoaffiliations (id, 
dvno_affiliation, dvno_group_name, dvno_email_level) values ('51', 'Norwegian Labour Movement Archives and Library', 'arbark.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('52', 'Centre for Advanced Study', 'cas.oslo.no', '3'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('53', 'CICERO Center for International Climate Research', 'cicero.oslo.no', '3'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('54', 'Christian Michelsen Research', 'cmr.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('55', 'nan', 'dataverse.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('56', 'nan', 'DataverseNO Admin', '1'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('57', 'nan', 'DataverseNO Curator', '1'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('58', 'nan', 'DataverseNO Dataset Creator', '1'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('59', 'Diakonova', 'diakonova.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('60', 'Fafo Foundation', 'fafo.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('61', 'Norwegian Defence Research Establishment', 'ffi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('62', 'Flymedisinsk institutt', 'flymed.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('63', 'Fridtjof Nansen Institute', 'fni.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('64', 'GenØk – Centre for Biosafety', 'genok.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('65', 'Norwegian Institute of Marine Research', 'hi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('66', 'Institute for Energy Technology', 'ife.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('67', 'International Research Institute of Stavanger', 'iris.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('68', 'Institute for Church, Religion, and Worldview Research', 'kifo.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('69', 'Cancer Registry of Norway', 'kreftregisteret.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('70', 'nan', 'legeforeningen.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('71', 'Møreforsking', 'moreforsk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('72', 'Nansen Environmental and Remote Sensing Center', 'nersc.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('73', 'Aeromedical Center of Norway', 'nfms.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) 
values ('74', 'Nordland Research Institute', 'nforsk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('75', 'Norwegian Geotechnical Institute', 'ngi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('76', 'Norwegian Institute for Cultural Heritage Research', 'niku.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('77', 'Norwegian Institute for Air Research', 'nilu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('78', 'Norwegian Institute for Water Research', 'niva.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('79', 'Norsk Landbruksrådgiving', 'nlr.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('80', 'Norwegian Nobel Institute', 'nobel.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('81', 'Nofima', 'nofima.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('82', 'Norwegian Research Centre', 'norceresearch.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('83', 'Norwegian Seismic Array', 'norsar.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('84', 'Norsk senter for økologisk landbruk', 'norsok.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('85', 'Norwegian Institute for Sustainability Research', 'norsus.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('86', 'nan', 'norut.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('87', 'Norwegian Institute of International Affairs', 'nupi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('88', 'Ostfold Research', 'ostfoldforskning.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('89', 'Eastern Norway Research Institute', 'ostforsk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('90', 'Paper and Fibre Research Institute', 'pfi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('91', 'Peace Research Institute', 'prio.org', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('92', 'RISE Fire Research', 'risefr.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('93', 'Institute for Rural and Regional Research', 'ruralis.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('94', 'Centre for Intercultural Communication', 'sik.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('95', 'Centre for Applied Research', 'snf.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('96', 'National Institute of Occupational Health', 'stami.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) 
values ('97', 'Teknova', 'teknova.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('98', 'Tel-Tek', 'tel-tek.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('99', 'Trøndelag Forskning og Utvikling', 'tfou.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('100', 'TISIP', 'tisip.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('101', 'Telemark Research Institute', 'tmforsk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('102', 'Institute of Transport Economics', 'toi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('103', 'Norwegian Institute of Wood Technology', 'treteknisk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('104', 'Uni Research', 'uni.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('105', 'Western Norway Research Institute', 'vestforsk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('106', 'Westerdals Oslo School of Arts, Communication and Technology', 'westerdals.no', '2'); diff --git a/distros/dataverse.no/init.d/affiliations/extratrigger.sql b/distros/dataverse.no/init.d/affiliations/extratrigger.sql new file mode 100644 index 0000000..d0ca280 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/extratrigger.sql @@ -0,0 +1,30 @@ +CREATE TABLE IF NOT EXISTS public.dvnoaffiliations ( + id bigint, + dvno_affiliation character varying(255) DEFAULT NULL::character varying, + dvno_group_name character varying(255) DEFAULT NULL::character varying, + dvno_email_level integer DEFAULT 2 +); + +ALTER TABLE public.dvnoaffiliations OWNER TO dataverse; +-- +-- Data for Name: dvnoaffiliations; Type: TABLE DATA; Schema: public; Owner: dataverse +-- +COPY public.dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) FROM stdin; +139 UiT The Arctic University of Norway uit.no 2 +27 Ostfold University College hiof.no 2 +4 Akvaplan-niva akvaplan.niva.no 3 +\. 
+ + +CREATE OR REPLACE FUNCTION public.affiliationupdate() RETURNS trigger + LANGUAGE plpgsql + AS $$ + +BEGIN + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations); + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations); + +RETURN NULL; +END; +$$; +CREATE TRIGGER affiliation_trigger AFTER INSERT ON public.authenticateduser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate();
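Patch 003 above wires up institutional affiliations in three steps: affiliations.csv is the registry, affiliation2data.py turns each row into an INSERT for the dvnoaffiliations table (empty cells come out as the literal string 'nan', visible for ids 55-58, 70 and 86 in affiliations.sql), and extratrigger.sql installs an AFTER INSERT trigger that maps the domain part of a new user's e-mail to an affiliation, trying a two-level match (uit.no) before a three-level one (fhs.mil.no). One quirk worth knowing: the generator computes dvno_email_level by splitting str() of a whole pandas Series, which only works because the Series repr happens to contain no dots beyond the value itself; the .values[0] form already used for the other columns is the robust idiom. A minimal sketch of loading and spot-checking the result, assuming the postgres container, the dataverse database and a Python environment with pandas, as used elsewhere in this distro:

    #!/bin/bash
    # Regenerate the INSERT statements and load them into the running container
    # (the committed affiliations.sql also prepends a TRUNCATE; add one if regenerating).
    python3 affiliation2data.py > affiliations.sql
    docker cp affiliations.sql postgres:/tmp/
    docker exec postgres psql -U dataverse dataverse -f /tmp/affiliations.sql
    # Spot-check the domain extraction the trigger relies on: this should
    # print 'uit.no' (the two-level match) for a typical UiT address.
    docker exec postgres psql -U dataverse dataverse -c "select substring('user@post.uit.no' from '\S+\W(\w+\W+\w+)');"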
From b097a016787fbd63f5b68c4e324a87da547da44f Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 24 May 2022 08:46:55 +0000 Subject: [PATCH 004/354] update affiliation --- .gitignore | 1 + .../init.d/affiliations/affiliation2data.py | 35 ++++++++++++------- 2 files changed, 24 insertions(+), 12 deletions(-)
diff --git a/.gitignore b/.gitignore index 71384b4..a8adb79 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ dataverse.war +.env #Ignoring IDE files .idea
diff --git a/distros/dataverse.no/init.d/affiliations/affiliation2data.py b/distros/dataverse.no/init.d/affiliations/affiliation2data.py index 31e4f4d..f548f2e 100644 --- a/distros/dataverse.no/init.d/affiliations/affiliation2data.py +++ b/distros/dataverse.no/init.d/affiliations/affiliation2data.py @@ -6,16 +6,27 @@ # id | dvno_affiliation | dvno_group_name | dvno_email_level #-----+-------------------------------------+------------------+------------------ # 139 | UiT The Arctic University of Norway | uit.no | 2 # 27 | Ostfold University College | hiof.no | 2 # 4 | Akvaplan-niva | akvaplan.niva.no | 3 -file = '/distrib/private/affiliations.csv' +localfile = '/distrib/private/affiliations.csv' +URLaff = 'rhttps://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/init.d/affiliations/affiliations.csv' + #print(pd.read_csv(open(file, errors='replace'))) -affiliations = pd.read_csv(file) -for i in affiliations.index: - #print(affiliations.iloc[[i]]['dvno_group_name']) - #print("%s %s" % (affiliations.iloc[[i]]['dvno_group_name'].astype(str), affiliations.iloc[[i]]['dvno_affiliation'].astype(str))) - #print(str(affiliations.iloc[[i]]['id'].values[0])) - #print(str(affiliations.iloc[[i]]['dvno_group_name'].values[0])) - #print(str(affiliations.iloc[[i]]['dvno_affiliation'].values[0])) - dvno_email_level = len(str(affiliations.iloc[[i]]['dvno_group_name']).split('.')) - #print(subdomains) - sql = "insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('%s', '%s', '%s', '%s');" % (affiliations.iloc[[i]]['id'].values[0], affiliations.iloc[[i]]['dvno_affiliation'].values[0], affiliations.iloc[[i]]['dvno_group_name'].values[0], dvno_email_level) - print(sql) +def reload_affiliations(loc): + affiliations = pd.read_csv(loc) + for i in affiliations.index: + #print(affiliations.iloc[[i]]['dvno_group_name']) + #print("%s %s" % (affiliations.iloc[[i]]['dvno_group_name'].astype(str), affiliations.iloc[[i]]['dvno_affiliation'].astype(str))) + #print(str(affiliations.iloc[[i]]['id'].values[0])) + #print(str(affiliations.iloc[[i]]['dvno_group_name'].values[0])) + #print(str(affiliations.iloc[[i]]['dvno_affiliation'].values[0])) + dvno_email_level = len(str(affiliations.iloc[[i]]['dvno_group_name']).split('.')) + #print(subdomains) + sql = "insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('%s', '%s', '%s', '%s');" % (affiliations.iloc[[i]]['id'].values[0], affiliations.iloc[[i]]['dvno_affiliation'].values[0], affiliations.iloc[[i]]['dvno_group_name'].values[0], dvno_email_level) + print(sql) + return + +try: + reload_affiliations(URLaff) +except: + #print("URL %s doesn't exist\n" % URLaff) + reload_affiliations(localfile) +
From aded5352ea221cced0b379c1652b2d999765960c Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 24 May 2022 10:50:19 +0200 Subject: [PATCH 005/354] Update affiliation2data.py typo --- distros/dataverse.no/init.d/affiliations/affiliation2data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/affiliations/affiliation2data.py b/distros/dataverse.no/init.d/affiliations/affiliation2data.py index f548f2e..232e499 100644 --- a/distros/dataverse.no/init.d/affiliations/affiliation2data.py +++ b/distros/dataverse.no/init.d/affiliations/affiliation2data.py @@ -7,7 +7,7 @@ # 4 | Akvaplan-niva | akvaplan.niva.no | 3 localfile = '/distrib/private/affiliations.csv' -URLaff = 'rhttps://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/init.d/affiliations/affiliations.csv' +URLaff = 'https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/init.d/affiliations/affiliations.csv' #print(pd.read_csv(open(file, errors='replace'))) def reload_affiliations(loc):
From c8ab323b1f6f4ce751c8af3a39796b5766ea26ec Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 24 May 2022 09:38:52 +0000 Subject: [PATCH 006/354] fixed affiliation --- distros/dataverse.no/init.d/affiliations/affiliation2data.py | 2 +- distros/dataverse.no/init.d/affiliations/recreate_trigger.sql | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 distros/dataverse.no/init.d/affiliations/recreate_trigger.sql
diff --git a/distros/dataverse.no/init.d/affiliations/affiliation2data.py b/distros/dataverse.no/init.d/affiliations/affiliation2data.py index f548f2e..232e499 100644 --- a/distros/dataverse.no/init.d/affiliations/affiliation2data.py +++ b/distros/dataverse.no/init.d/affiliations/affiliation2data.py @@ -7,7 +7,7 @@ # 4 | Akvaplan-niva | akvaplan.niva.no | 3 localfile = '/distrib/private/affiliations.csv' -URLaff = 'rhttps://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/init.d/affiliations/affiliations.csv' +URLaff = 'https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/init.d/affiliations/affiliations.csv' #print(pd.read_csv(open(file, errors='replace'))) def reload_affiliations(loc):
diff --git a/distros/dataverse.no/init.d/affiliations/recreate_trigger.sql b/distros/dataverse.no/init.d/affiliations/recreate_trigger.sql new file mode 100644 index 0000000..6c1c0a6 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/recreate_trigger.sql @@ -0,0 +1,2 @@ +DROP TRIGGER IF EXISTS affiliation_trigger ON public.authenticateduser; +CREATE TRIGGER affiliation_trigger AFTER INSERT ON public.authenticateduser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate();
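Patches 004 through 006 above turn the generator into reload_affiliations(), which tries the published CSV on GitHub first and falls back to the private local copy. The bare except: swallows every failure mode, which is presumably why the stray 'r' in the URL scheme only surfaced at runtime and took two follow-up commits to fix. A shell rendering of the same fetch-with-fallback idea, with explicit failure reporting; the URL and path are taken from the patches, while the curl flags and target path are my own choices, not part of the repo:

    #!/bin/bash
    # Fetch the affiliation registry from GitHub, falling back to the private copy.
    URLAFF='https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/init.d/affiliations/affiliations.csv'
    LOCALFILE='/distrib/private/affiliations.csv'
    if ! curl -fsSL "$URLAFF" -o /tmp/affiliations.csv; then
        echo "download failed, using ${LOCALFILE}" >&2
        cp "$LOCALFILE" /tmp/affiliations.csv
    fi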
From ab085f878b5b2d0db1211d608121be744364384c Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 24 May 2022 10:37:23 +0000 Subject: [PATCH 007/354] trigger update --- distros/dataverse.no/init.d/affiliations/recreate_trigger.sql | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/affiliations/recreate_trigger.sql b/distros/dataverse.no/init.d/affiliations/recreate_trigger.sql index 6c1c0a6..7b86a61 100644 --- a/distros/dataverse.no/init.d/affiliations/recreate_trigger.sql +++ b/distros/dataverse.no/init.d/affiliations/recreate_trigger.sql @@ -1,2 +1,3 @@ DROP TRIGGER IF EXISTS affiliation_trigger ON public.authenticateduser; -CREATE TRIGGER affiliation_trigger AFTER INSERT ON public.authenticateduser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); +DROP TRIGGER IF EXISTS affiliation_trigger ON public.actionlogrecord; +CREATE TRIGGER affiliation_trigger AFTER INSERT ON public.actionlogrecord FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate();
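Patch 007 moves affiliation_trigger from authenticateduser to actionlogrecord, presumably so the mapping re-runs whenever a user generates logged activity rather than only once at account creation, when a federated account's e-mail may not yet be final. The trade-off is that affiliationupdate() issues two table-wide UPDATEs for every row inserted into actionlogrecord, which is not cheap on a busy installation. A minimal sketch for confirming where the trigger ended up, reusing the postgres container and dataverse database names from the other scripts:

    #!/bin/bash
    # Show which table affiliation_trigger is attached to;
    # expect 'actionlogrecord' once recreate_trigger.sql has been applied.
    docker exec postgres psql -U dataverse dataverse -c \
      "select t.tgname, c.relname from pg_trigger t join pg_class c on c.oid = t.tgrelid where t.tgname = 'affiliation_trigger';"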
From c83265df9654c7f0a49af950592f5f2ff8d60383 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Mon, 30 May 2022 10:10:42 +0000 Subject: [PATCH 008/354] restart-dataverse.sh --- restart-dataverse.sh | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 restart-dataverse.sh
diff --git a/restart-dataverse.sh b/restart-dataverse.sh new file mode 100644 index 0000000..27f6b86 --- /dev/null +++ b/restart-dataverse.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +# Check if Dataverse is online +healthcheck="/tmp/healthcheck.log" +restartpid='/tmp/restart.pid' +rm $healthcheck +DATAVERSE='https://test-docker.dataverse.no' +DATAVERSETMP='https://test-docker1.dataverse.no' +#DATAVERSE=${DATAVERSETMP} +#DATAVERSE='https://demo.dataverse.no' + +curl ${DATAVERSE}/api/dataverses/root|grep "description" >> $healthcheck +DELAY=15 + +if [ -s $healthcheck ]; +then + rm $restartpid + echo "Dataverse ${DATAVERSE} is running. " +else + echo "Dataverse ${DATAVERSE} is stopped" + if [ -s $restartpid ]; + then + echo "Dataverse is restarting..." + else + echo 'restarting...' > $restartpid + date >> /mntblob/logs/restart.log + cd /distrib/dataverse-docker + /usr/local/bin/docker-compose down + sleep $DELAY + /usr/local/bin/docker-compose up -d + fi +fi
From 98c85dcf98a06636edb55f1475dd691dcb146088 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Mon, 30 May 2022 10:28:31 +0000 Subject: [PATCH 009/354] add domain name as parameter --- restart-dataverse.sh | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/restart-dataverse.sh b/restart-dataverse.sh index 27f6b86..385b4d2 100644 --- a/restart-dataverse.sh +++ b/restart-dataverse.sh @@ -4,10 +4,8 @@ healthcheck="/tmp/healthcheck.log" restartpid='/tmp/restart.pid' rm $healthcheck -DATAVERSE='https://test-docker.dataverse.no' -DATAVERSETMP='https://test-docker1.dataverse.no' -#DATAVERSE=${DATAVERSETMP} -#DATAVERSE='https://demo.dataverse.no' +DATAVERSE=$1 #'https://test-docker.dataverse.no' +echo $DATAVERSE curl ${DATAVERSE}/api/dataverses/root|grep "description" >> $healthcheck DELAY=15
From 932b23b243fc50abe3fef09074604d151e19b133 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Thu, 2 Jun 2022 11:49:49 +0000 Subject: [PATCH 010/354] migration script --- distros/dataverse.no/migration/createDBfreomDump.sql | 3 +++ distros/dataverse.no/migration/replaceDatabase.sh | 6 ++++++ distros/dataverse.no/migration/replaceDatabase.sql | 8 ++++++++ 3 files changed, 17 insertions(+) create mode 100644 distros/dataverse.no/migration/createDBfreomDump.sql create mode 100644 distros/dataverse.no/migration/replaceDatabase.sh create mode 100644 distros/dataverse.no/migration/replaceDatabase.sql
diff --git a/distros/dataverse.no/migration/createDBfreomDump.sql b/distros/dataverse.no/migration/createDBfreomDump.sql new file mode 100644 index 0000000..3f50c1d --- /dev/null +++ b/distros/dataverse.no/migration/createDBfreomDump.sql @@ -0,0 +1,3 @@ +dropdb -U dataverse dataverse; +createdb -U dataverse dataverse; +psql -U dataverse dataverse -f /mnttmp/opendata*.sql
diff --git a/distros/dataverse.no/migration/replaceDatabase.sh b/distros/dataverse.no/migration/replaceDatabase.sh new file mode 100644 index 0000000..3da3cd4 --- /dev/null +++ b/distros/dataverse.no/migration/replaceDatabase.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +docker cp ./replaceDatabase.sql postgres:/tmp + +docker exec -it postgres bash -c "su - postgres;psql -U dataverse dataverse -f /tmp/replaceDatabase.sql" +
diff --git a/distros/dataverse.no/migration/replaceDatabase.sql b/distros/dataverse.no/migration/replaceDatabase.sql new file mode 100644 index 0000000..00ee672 --- /dev/null +++ b/distros/dataverse.no/migration/replaceDatabase.sql @@ -0,0 +1,8 @@ +update dvobject set storageidentifier='S3://2002-green-dataversenotest1:' where dtype='Dataset'; + +UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); +UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'local://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%local://%'); + + + +
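The migration above rewrites every non-harvested DataFile's storageidentifier from the file:// and local:// drivers onto the S3 bucket, deliberately excluding harvested content via the harvestingclient_id IS NULL guard. Two reading notes: despite its .sql suffix, createDBfreomDump.sql holds shell commands (dropdb/createdb/psql), so it is meant to be run in a shell inside the container rather than fed to psql -f; and in replaceDatabase.sh the "su - postgres;psql ..." pair runs sequentially, with psql only starting after the su shell exits. A minimal before/after sanity check, assuming the postgres container and dataverse database used throughout:

    #!/bin/bash
    # Count identifiers still on the old drivers; for non-harvested datafiles
    # this should drop to zero once replaceDatabase.sql has been applied.
    docker exec postgres psql -U dataverse dataverse -c \
      "select count(*) from dvobject where storageidentifier like 'file://%' or storageidentifier like 'local://%';"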
From 5ab2844330306eb6bc0d9ab255c9ac606343389e Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Thu, 2 Jun 2022 11:57:49 +0000 Subject: [PATCH 011/354] updated sql statement --- distros/dataverse.no/migration/replaceDatabase.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/migration/replaceDatabase.sql b/distros/dataverse.no/migration/replaceDatabase.sql index 00ee672..6d0d252 100644 --- a/distros/dataverse.no/migration/replaceDatabase.sql +++ b/distros/dataverse.no/migration/replaceDatabase.sql @@ -1,4 +1,4 @@ -update dvobject set storageidentifier='S3://2002-green-dataversenotest1:' where dtype='Dataset'; +UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://') WHERE dtype='Dataset' and storageidentifier like '%file://%'; UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'local://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%local://%');
From cf274f0efde308073fc035660e2e5946faf30336 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 3 Jun 2022 09:59:25 +0000 Subject: [PATCH 012/354] backup/cleanup script for db --- distros/dataverse.no/migration/cleanup-database.sql | 6 ++++++ distros/dataverse.no/migration/create-backup-db.sh | 7 +++++++ 2 files changed, 13 insertions(+) create mode 100644 distros/dataverse.no/migration/cleanup-database.sql create mode 100644 distros/dataverse.no/migration/create-backup-db.sh
diff --git a/distros/dataverse.no/migration/cleanup-database.sql b/distros/dataverse.no/migration/cleanup-database.sql new file mode 100644 index 0000000..16c9490 --- /dev/null +++ b/distros/dataverse.no/migration/cleanup-database.sql @@ -0,0 +1,6 @@ + +alter table authenticateduser drop constraint authenticateduser_email_key; +drop index index_authenticateduser_lower_email; +Update authenticateduser set email='noreply@uit.no'; + +update datasetfieldvalue set value='noreply@uit.no' where datasetfield_id in (select id from datasetfield where datasetfieldtype_id=15);
diff --git a/distros/dataverse.no/migration/create-backup-db.sh b/distros/dataverse.no/migration/create-backup-db.sh new file mode 100644 index 0000000..efe88b5 --- /dev/null +++ b/distros/dataverse.no/migration/create-backup-db.sh @@ -0,0 +1,7 @@ +#!/bin/bash +docker exec -it postgres bash -c "pg_dump -U dataverse dataverse > /var/lib/postgresql/data/dataverse.dump" +gzip -c /extdisk/database-data-demo/dataverse.dump > "/extdisk/database-data-demo/dataverse$(date +'%Y%m%d').dump.gz" +docker exec -it postgres bash -c "createdb -U dataverse dataverse-tmp" +docker exec -it postgres bash -c "psql -U dataverse dataverse-tmp -f /var/lib/postgresql/data/dataverse.dump" +cp ./cleanup-database.sql /var/lib/postgresql/data/ +docker exec -it postgres bash -c "psql -U dataverse dataverse-tmp -f /var/lib/postgresql/data/cleanup-database.sql"
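create-backup-db.sh above produces a dated, gzipped dump and restores it into a scratch database, dataverse-tmp, where cleanup-database.sql scrubs personal data: it drops the e-mail uniqueness constraint and index so every account can be set to noreply@uit.no, and blanks the dataset contact e-mails (datasetfieldtype_id=15 is evidently the contact e-mail field in this installation). A quick check that the scrub covered everything, as a sketch using the same container conventions:

    #!/bin/bash
    # Both counts should be zero after cleanup-database.sql has run against dataverse-tmp.
    docker exec postgres psql -U dataverse dataverse-tmp -c \
      "select count(*) from authenticateduser where email <> 'noreply@uit.no';"
    docker exec postgres psql -U dataverse dataverse-tmp -c \
      "select count(*) from datasetfieldvalue v join datasetfield f on f.id = v.datasetfield_id where f.datasetfieldtype_id = 15 and v.value <> 'noreply@uit.no';"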
a/distros/dataverse.no/init.d/affiliations/affiliations.csv b/distros/dataverse.no/init.d/affiliations/affiliations.csv index fc8a230..8879cb9 100644 --- a/distros/dataverse.no/init.d/affiliations/affiliations.csv +++ b/distros/dataverse.no/init.d/affiliations/affiliations.csv @@ -1,108 +1,124 @@ -id,dvno_group_name,dvno_group_id_explicit,dvno_group_id,dvno_membership,dvno_users,dvno_affiliation,feide_id,feide_email,feide_name,feide_nb,feide_nn,feide_en,feide_se,ror_id,ror_name,ror_acronym,comments -1,phs.no,&explicit/1-phsno,phsno,No Members,No,The Norwegian Police University College,54,phs.no,The Norwegian Police University College,Politihøgskolen,Politihøgskulen,The Norwegian Police University College,,https://ror.org/05486d596,Norwegian Police University College,PHS, -2,uia.no,&explicit/1-uiano,uiano,"24 users, 0 groups",24,University of Agder,55,uia.no,University of Agder,Universitetet i Agder,Universitetet i Agder,University of Agder,Agder Universitehta,https://ror.org/03x297z98,University of Agder,, -3,nifu.no,&explicit/1-nifuno,nifuno,No Members,No,NIFU,58,nifu.no,NIFU,NIFU,NIFU,NIFU,,,,, -4,hiof.no,&explicit/1-hiofno,hiofno,"7 users, 0 groups",7,Østfold University College,70,hiof.no,Østfold University College,Høgskolen i Østfold,Høgskulen i Østfold,Østfold University College,Østfold Allaskuvla,https://ror.org/04gf7fp41,Østfold University College,HiØ, -5,aho.no,&explicit/1-ahono,ahono,No Members,No,The Oslo School of Architecture and Design,93,aho.no,The Oslo School of Architecture and Design,Arkitektur- og designhøgskolen i Oslo,Arkitektur- og designhøgskulen i Oslo,The Oslo School of Architecture and Design,,,,, -6,cmi.no,&explicit/1-cmino,cmino,No Members,No,Chr. Michelsen Institute,96,cmi.no,Chr. Michelsen Institute,Chr. Michelsens Institutt,Chr. Michelsens Institutt,Chr. 
Michelsen Institute,,https://ror.org/02w7rbf39,Christian Michelsen Institute,CMI, -7,mf.no,&explicit/1-mfno,mfno,No Members,No,"MF Norwegian School of Theology, Religion and Society",100,mf.no,"MF Norwegian School of Theology, Religion and Society",MF vitenskapelig høyskole,MF vitenskapelig høyskole,"MF Norwegian School of Theology, Religion and Society",,https://ror.org/01qafy255,"MF Norwegian School of Theology, Religion and Society",, -8,dmmh.no,&explicit/1-dmmhno,dmmhno,No Members,No,Queen Maud University College,103,dmmh.no,Queen Maud University College,Dronning Mauds Minne Høgskole,Dronning Mauds Minne Høgskule,Queen Maud University College,,https://ror.org/043zemc40,Queen Maud University College,DMMH, -9,nhh.no,&explicit/1-nhhno,nhhno,No Members,No,Norwegian School of Economics,119,nhh.no,Norwegian School of Economics,Norges Handelshøyskole,Norges Handelshøyskole,Norwegian School of Economics,,https://ror.org/04v53s997,Norwegian School of Economics,NHH, -10,nla.no,&explicit/1-nlano,nlano,No Members,No,NLA University College,123,nla.no,NLA University College,NLA Høgskolen,NLA Høgskolen,NLA University College,NLA Høgskolen,https://ror.org/05fdt2q64,NLA University College,, -11,npolar.no,&explicit/1-npolarno,npolarno,No Members,No,Norwegian Polar Institute,124,,Norwegian Polar Institute,Norsk Polarinstitutt,,Norwegian Polar Institute,,https://ror.org/03avf6522,Norwegian Polar Institute,, -12,nr.no,&explicit/1-nrno,nrno,No Members,No,Norwegian Computing Center,125,nr.no,Norsk Regnesentral,Norsk Regnesentral,,,,https://ror.org/02gm7te43,Norwegian Computing Center,NR, -13,sintef.no,&explicit/1-sintefno,sintefno,"1 user, 0 groups",1,SINTEF,131,sintef.no,SINTEF,SINTEF,SINTEF,SINTEF,,https://ror.org/01f677e56,SINTEF,, -14,samiskhs.no,&explicit/1-samiskhsno,samiskhsno,No Members,No,Sámi allaskuvla – Sámi University College,133,samas.no,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,Sámi allaskuvla – Samisk høgskule,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,https://ror.org/028ahgk39,Sámi University of Applied Sciences,, -15,uib.no,&explicit/1-uibno,uibno,"76 users, 0 groups",76,University of Bergen,137,uib.no,University of Bergen,Universitetet i Bergen,Universitetet i Bergen,University of Bergen,Bergen Universitehta,https://ror.org/03zga2b32,University of Bergen,, -16,uio.no,&explicit/1-uiono,uiono,"33 users, 0 groups",33,University of Oslo,138,uio.no,University of Oslo,Universitetet i Oslo,Universitetet i Oslo,University of Oslo,Oslo Universitehta,https://ror.org/01xtthb56,University of Oslo,UiO, -17,uit.no,&explicit/1-uitno,uitno,"341 users, 0 groups",341,UiT The Arctic University of Norway,139,uit.no,UiT The Arctic University of Norway,UiT Norges arktiske universitet,UiT Noregs arktiske universitet,UiT The Arctic University of Norway,UiT Norgga árktalaš universitehta,https://ror.org/00wge5k78,UiT The Arctic University of Norway,UiT, -18,ntnu.no,&explicit/1-ntnuno,ntnuno,"142 users, 0 groups",142,Norwegian University of Science and Technology,140,ntnu.no,NTNU,NTNU,,NTNU,,https://ror.org/05xg72x27,Norwegian University of Science and Technology,NTNU, -19,nina.no,&explicit/1-ninano,ninano,No Members,No,Norwegian Institute for Nature Research,144,nina.no,NINA,NINA,NINA,,,https://ror.org/04aha0598,Norwegian Institute for Nature Research,NINA, -20,ngu.no,&explicit/1-nguno,nguno,"1 user, 0 groups",1,Geological Survey of Norway,145,ngu.no,Geological Survey of Norway,Norges geologiske undersøkelse,Norges geologiske undersøkelse,Geological 
Survey of Norway,,,,, -21,himolde.no,&explicit/1-himoldeno,himoldeno,No Members,No,Molde University College,158,himolde.no,Molde University College,Høgskolen i Molde,Høgskulen i Molde,Molde University College,Molde Allaskuvla,https://ror.org/00kxjcd28,Molde University College,HiM, -22,nb.no,&explicit/1-nbno,nbno,No Members,No,National Library of Norway,160,nb.no,National Library of Norway,Nasjonalbiblioteket,Nasjonalbiblioteket,National Library of Norway,,,,, -23,uis.no,&explicit/1-uisno,uisno,"14 users, 0 groups",14,University of Stavanger,163,uis.no,University of Stavanger,Universitetet i Stavanger,Universitetet i Stavanger,University of Stavanger,Stavanger Universitehta,https://ror.org/02qte9q33,University of Stavanger,UiS, -24,hivolda.no,&explicit/1-hivoldano,hivoldano,No Members,No,Volda University College,165,hivolda.no,Volda University College,Høgskulen i Volda,Høgskulen i Volda,Volda University College,Volda Allaskuvla,https://ror.org/01eeqzy24,Volda University College,HVO, -25,khio.no,&explicit/1-khiono,khiono,No Members,No,Oslo National Academy of the Arts,195,khio.no,Oslo National Academy of the Arts,Kunsthøgskolen i Oslo,Kunsthøgskulen i Oslo,Oslo National Academy of the Arts,,https://ror.org/0543h9a62,Oslo National Academy of the Arts,, -26,samfunnsforskning.no,&explicit/1-samfunnsforskningno,samfunnsforskningno,No Members,No,Institute for Social Research,197,samfunnsforskning.no,Institute for Social Research,Institutt for samfunnsforskning,,Institute for Social Research,,https://ror.org/05swz5441,Institute for Social Research,IFS, -27,ldh.no,&explicit/1-ldhno,ldhno,No Members,No,Lovisenberg Diaconal University College,216,ldh.no,Lovisenberg diaconal university college,Lovisenberg diakonale høgskole,Lovisenberg diakonale høgskule,Lovisenberg diaconal university college,,https://ror.org/015rzvz05,Lovisenberg Diakonale Høgskole,LDH, -28,fhi.no,&explicit/1-fhino,fhino,No Members,No,Norwegian Institute of Public Health,310,fhi.no,Norwegian Institute of Public Health,Folkehelseinstituttet,,Norwegian Institute of Public Health,,https://ror.org/046nvst19,Norwegian Institute of Public Health,NIPH, -29,nih.no,&explicit/1-nihno,nihno,No Members,No,Norwegian School of Sport Sciences,323,nih.no,Norwegian School of Sport Sciences,Norges idrettshøgskole,Noregs idrettshøgskule,Norwegian School of Sport Sciences,,https://ror.org/045016w83,Norwegian School of Sport Sciences,NSSS, -30,bi.no,&explicit/1-bino,bino,No Members,No,BI Norwegian Business School,324,bi.no,BI Norwegian Business School,Handelshøyskolen BI,Handelshøyskolen BI,BI Norwegian Business School,,https://ror.org/03ez40v33,BI Norwegian Business School,, -31,nmh.no,&explicit/1-nmhno,nmhno,No Members,No,Norwegian Academy of Music,325,nmh.no,Norwegian Academy of Music,Norges musikkhøgskole,Noregs musikkhøgskule,Norwegian Academy of Music,,https://ror.org/052dy9793,Norwegian Academy of Music,NMH, -32,kristiania.no,&explicit/1-kristianiano,kristianiano,No Members,No,Kristiania University College,17007,feide.egms.no,Kristiania University College,Høyskolen Kristiania,Høyskolen Kristiania,Kristiania University College,,https://ror.org/03gss5916,Campus Kristiania,, -33,fhs.mil.no,&explicit/1-fhsmilno,fhsmilno,"1 user, 0 groups",1,Norwegian Defence University College,115267,mil.no,Norwegian Defence University College,Forsvarets høgskoler,Forsvarets høgskuler,Norwegian Defence University College,,https://ror.org/02vfz9j23,Norwegian Defence University College,NDUC, -34,ansgarskolen.no,&explicit/1-ansgarskolenno,ansgarskolenno,No 
Members,No,Ansgar University College,120177,ansgarhogskole.no,Ansgar University College,Ansgar høyskole,,Ansgar University College,,https://ror.org/05y8hw592,Ansgar Bibelskole,, -35,oslomet.no,&explicit/1-oslometno,oslometno,No Members,No,OsloMet – Oslo Metropolitan University,120186,oslomet.no,OsloMet – Oslo Metropolitan University,OsloMet – storbyuniversitetet,OsloMet – storbyuniversitetet,OsloMet – Oslo Metropolitan University,OsloMet – stuorragávpotuniversitehta,https://ror.org/04q12yn84,OsloMet – Oslo Metropolitan University,HiOA, -36,nmbu.no,&explicit/1-nmbuno,nmbuno,"48 users, 0 groups",48,Norwegian University of Life Sciences,1777926,nmbu.no,Norwegian University of Life Sciences,Norges miljø- og biovitenskapelige universitet,Noregs miljø- og biovitenskapelige universitet,Norwegian University of Life Sciences,,https://ror.org/04a1mvv97,Norwegian University of Life Sciences,NMBU, -37,nibio.no,&explicit/1-nibiono,nibiono,No Members,No,Norwegian Institute of Bioeconomy Research,2052113,nibio.no,Nibio,Nibio,,,,https://ror.org/04aah1z61,Norwegian Institute of Bioeconomy Research,NIBIO, -38,vid.no,&explicit/1-vidno,vidno,No Members,No,VID Specialized University,2064538,vid.no,VID Specialized University,VID vitenskapelige høgskole,VID vitenskapelige høgskule,VID Specialized University,,https://ror.org/0191b3351,VID Specialized University,VID, -39,nord.no,&explicit/1-nordno,nordno,"23 users, 0 groups",23,Nord University,2066644,nord.no,Nord University,Nord universitet,Nord universitet,Nord University,,https://ror.org/030mwrt98,Nord University,, -40,usn.no,&explicit/1-usnno,usnno,No Members,No,University of South-Eastern Norway,2066647,usn.no,University of South-Eastern Norway,Universitetet i Sørøst-Norge,Universitetet i Søraust-Noreg,University of South-Eastern Norway,,https://ror.org/05ecg5h20,University of South-Eastern Norway,USN, -41,hvl.no,&explicit/1-hvlno,hvlno,"28 users, 0 groups",28,Western Norway University of Applied Sciences,2126357,hvl.no,Western Norway University of Applied Sciences,Høgskulen på Vestlandet,Høgskulen på Vestlandet,Western Norway University of Applied Sciences,,https://ror.org/05phns765,Western Norway University of Applied Sciences,HVL, -42,nkvts.no,&explicit/1-nkvtsno,nkvtsno,No Members,No,Norwegian centre for violence and traumatic stress studies,2127917,nkvts.no,Norwegian centre for violence and traumatic stress studies,Nasjonalt kunnskapssenter om vold og traumatisk stress,,Norwegian centre for violence and traumatic stress studies,,https://ror.org/01p618c36,Norwegian Centre for Violence and Traumatic Stress Studies,NKVTS, -43,inn.no,&explicit/1-innno,innno,"26 users, 0 groups",26,Inland Norway University of Applied Sciences,2128215,inn.no,Inland Norway University of Applied Sciences,Høgskolen i Innlandet,,Inland Norway University of Applied Sciences,,https://ror.org/02dx4dc92,Inland Norway University of Applied Sciences,, -44,vetinst.no,&explicit/1-vetinstno,vetinstno,No Members,No,Norwegian Veterinary Institute,2217125,vetinst.no,Veterinærinstituttet,Veterinærinstituttet,,,,https://ror.org/05m6y3182,Norwegian Veterinary Institute,NVI, -45,nubu.no,&explicit/1-nubuno,nubuno,No Members,No,NUBU - The Norwegian Center for Child Behavioral Development,2217221,nubu.no,NUBU - The Norwegian Center for Child Behavioral Development,NUBU - Nasjonalt utviklingssenter for barn og unge,,NUBU - The Norwegian Center for Child Behavioral Development,,,,, -46,hlsenteret.no,&explicit/1-hlsenteretno,hlsenteretno,No Members,No,The Norwegian Center for Holocaust and Minority 
Studies,2217222,hlsenteret.no,The Norwegian Center for Holocaust and Minority Studies,Senter for studier av Holocaust og livssynsminoriteter,,The Norwegian Center for Holocaust and Minority Studies,,https://ror.org/03ppkyp25,Center for Studies of Holocaust and Religious Minorities,, -47,met.no,&explicit/1-metno,metno,No Members,No,Norwegian Meteorological Institute,2217341,,Meteorologisk Institutt,Meteorologisk Institutt,,,,https://ror.org/001n36p86,Norwegian Meteorological Institute,MET, -48,simula.no,&explicit/1-simulano,simulano,No Members,No,Simula Research Laboratory,2217477,simula.no,Simula,Simula,,,,https://ror.org/00vn06n10,Simula Research Laboratory,, -49,agderforskning.no,&explicit/1-agderforskningno,agderforskningno,No Members,No,Agder Research,,,,,,,,https://ror.org/02k3w5n89,Agder Research,, -50,akvaplan.niva.no,&explicit/1-akvaplannivano,akvaplannivano,No Members,No,Akvaplan-niva,,,,,,,,https://ror.org/03nrps502,Akvaplan-niva (Norway),, -51,arbark.no,&explicit/1-arbarkno,arbarkno,No Members,No,Norwegian Labour Movement Archives and Library,,,,,,,,https://ror.org/05x91m338,Norwegian Labour Movement Archives and Library,, -52,cas.oslo.no,&explicit/1-casoslono,casoslono,No Members,No,Centre for Advanced Study,,,,,,,,https://ror.org/05rbhea42,Centre for Advanced Study,CAS, -53,cicero.oslo.no,&explicit/1-cicerooslono,cicerooslono,No Members,No,CICERO Center for International Climate Research,,,,,,,,https://ror.org/01gw5dy53,CICERO Center for International Climate Research,CICERO, -54,cmr.no,&explicit/1-cmrno,cmrno,No Members,No,Christian Michelsen Research,,,,,,,,,,,Now part of NORCE Norwegian Research Centre -55,dataverse.no,&explicit/1-dataverseno,dataverseno,No Members,No,,,,,,,,,,,, -56,DataverseNO Admin,&explicit/1-DataverseNOAdmin,DataverseNOAdmin,"3 users, 0 groups",3,,,,,,,,,,,, -57,DataverseNO Curator,&explicit/1-DvNOCurator,DvNOCurator,"4 users, 0 groups",4,,,,,,,,,,,, -58,DataverseNO Dataset Creator,&explicit/1-DataverseNODatasetCreator,DataverseNODatasetCreator,"4 users, 0 groups",4,,,,,,,,,,,, -59,diakonova.no,&explicit/1-diakonovano,diakonovano,No Members,No,Diakonova,,,,,,,,,,, -60,fafo.no,&explicit/1-fafono,fafono,No Members,No,Fafo Foundation,,,,,,,,https://ror.org/00ee9xb13,Fafo Foundation,, -61,ffi.no,&explicit/1-ffino,ffino,No Members,No,Norwegian Defence Research Establishment,,,,,,,,https://ror.org/0098gnz32,Norwegian Defence Research Establishment,FFI, -62,flymed.no,&explicit/1-flymedno,flymedno,No Members,No,Flymedisinsk institutt,,,,,,,,,,, -63,fni.no,&explicit/1-fnino,fnino,No Members,No,Fridtjof Nansen Institute,,,,,,,,https://ror.org/04ep2t954,Fridtjof Nansen Institute,FNI, -64,genok.no,&explicit/1-genokno,genokno,No Members,No,GenØk – Centre for Biosafety,,,,,,,,https://ror.org/027arfy53,GenØk,, -65,hi.no,&explicit/1-hino,hino,No Members,No,Norwegian Institute of Marine Research,,,,,,,,https://ror.org/05vg74d16,Norwegian Institute of Marine Research,IMR, -66,ife.no,&explicit/1-ifeno,ifeno,No Members,No,Institute for Energy Technology,,,,,,,,https://ror.org/02jqtg033,Institute for Energy Technology,IFE, -67,iris.no,&explicit/1-irisno,irisno,No Members,No,International Research Institute of Stavanger,,,,,,,,https://ror.org/0502t5s28,International Research Institute of Stavanger,IRIS,Now part of NORCE Norwegian Research Centre -68,kifo.no,&explicit/1-kifono,kifono,No Members,No,"Institute for Church, Religion, and Worldview Research",,,,,,,,https://ror.org/051p4t773,"Institute for Church, Religion, and Worldview Research",KIFO, 
-69,kreftregisteret.no,&explicit/1-kreftregisteretno,kreftregisteretno,No Members,No,Cancer Registry of Norway,,,,,,,,https://ror.org/03sm1ej59,Cancer Registry of Norway,CRN, -70,legeforeningen.no,&explicit/1-legeforeningenno,legeforeningenno,No Members,No,,,,,,,,,,,, -71,moreforsk.no,&explicit/1-moreforskno,moreforskno,No Members,No,Møreforsking,,,,,,,,https://ror.org/02w4kss89,Møreforsking (Norway),, -72,nersc.no,&explicit/1-nerscno,nerscno,No Members,No,Nansen Environmental and Remote Sensing Center,,,,,,,,,,, -73,nfms.no,&explicit/1-nfmsno,nfmsno,No Members,No,Aeromedical Center of Norway,,,,,,,,,,, -74,nforsk.no,&explicit/1-nforskno,nforskno,No Members,No,Nordland Research Institute,,,,,,,,https://ror.org/02wvb2a30,Nordland Research Institute,, -75,ngi.no,&explicit/1-ngino,ngino,No Members,No,Norwegian Geotechnical Institute,,,,,,,,https://ror.org/032ksge37,Norwegian Geotechnical Institute,NGI, -76,niku.no,&explicit/1-nikuno,nikuno,No Members,No,Norwegian Institute for Cultural Heritage Research,,,,,,,,https://ror.org/02xhrye98,Norwegian Institute for Cultural Heritage Research,NIKU, -77,nilu.no,&explicit/1-niluno,niluno,No Members,No,Norwegian Institute for Air Research,,,,,,,,https://ror.org/00q7d9z06,Norwegian Institute for Air Research,NILU, -78,niva.no,&explicit/1-nivano,nivano,No Members,No,Norwegian Institute for Water Research,,,,,,,,https://ror.org/03hrf8236,Norwegian Institute for Water Research,NIVA, -79,nlr.no,&explicit/1-nlrno,nlrno,No Members,No,Norsk Landbruksrådgiving,,,,,,,,https://ror.org/03c1zct07,Norsk Landbruksrådgiving,NLR, -80,nobel.no,&explicit/1-nobelno,nobelno,No Members,No,Norwegian Nobel Institute,,,,,,,,https://ror.org/055wgnw59,Norwegian Nobel Institute,, -81,nofima.no,&explicit/1-nofimano,nofimano,No Members,No,Nofima,,,,,,,,https://ror.org/02v1rsx93,Nofima,, -82,norceresearch.no,&explicit/1-norceresearchno,norceresearchno,"1 user, 0 groups",1,Norwegian Research Centre,,,,,,,,https://ror.org/02gagpf75,Norwegian Research Centre,NORCE, -83,norsar.no,&explicit/1-norsarno,norsarno,No Members,No,Norwegian Seismic Array,,,,,,,,https://ror.org/02vw8cm83,Norsar,, -84,norsok.no,&explicit/1-norsokno,norsokno,No Members,No,Norsk senter for økologisk landbruk,,,,,,,,,,, -85,norsus.no,&explicit/1-norsusno,norsusno,No Members,No,Norwegian Institute for Sustainability Research,,,,,,,,,,, -86,norut.no,&explicit/1-norutno,norutno,No Members,No,,,,,,,,,,,,Now part of NORCE Norwegian Research Centre -87,nupi.no,&explicit/1-nupino,nupino,No Members,No,Norwegian Institute of International Affairs,,,,,,,,https://ror.org/01pznaa94,Norwegian Institute of International Affairs,NUPI, -88,ostfoldforskning.no,&explicit/1-ostfoldforskningno,ostfoldforskningno,No Members,No,Ostfold Research,,,,,,,,https://ror.org/01vmqaq17,Ostfold Research (Norway),,Has changed name to -89,ostforsk.no,&explicit/1-ostforskno,ostforskno,No Members,No,Eastern Norway Research Institute,,,,,,,,https://ror.org/020deqr47,Eastern Norway Research Institute,ENRI, -90,pfi.no,&explicit/1-pfino,pfino,No Members,No,Paper and Fibre Research Institute,,,,,,,,https://ror.org/053qb6g74,Paper and Fibre Research Institute,PFI, -91,prio.org,&explicit/1-prioorg,prioorg,No Members,No,Peace Research Institute,,,,,,,,https://ror.org/04dx54y73,Peace Research Institute,PRIO, -92,risefr.no,&explicit/1-risefrno,risefrno,No Members,No,RISE Fire Research,,,,,,,,,,, -93,ruralis.no,&explicit/1-ruralisno,ruralisno,No Members,No,Institute for Rural and Regional Research,,,,,,,,https://ror.org/0169gd037,Centre for Rural 
Research,CRR,Note: The ROR entry is not up to date. -94,sik.no,&explicit/1-sikno,sikno,No Members,No,Centre for Intercultural Communication,,,,,,,,,,,Now part of VID -95,snf.no,&explicit/1-snfno,snfno,No Members,No,Centre for Applied Research,,,,,,,,,,, -96,stami.no,&explicit/1-stamino,stamino,No Members,No,National Institute of Occupational Health,,,,,,,,https://ror.org/04g3t6s80,National Institute of Occupational Health,NIOH, -97,teknova.no,&explicit/1-teknovano,teknovano,No Members,No,Teknova,,,,,,,,https://ror.org/02ekw8p73,Teknova,,Now part of NORCE Norwegian Research Centre -98,tel-tek.no,&explicit/1-tel-tekno,tel-tekno,No Members,No,Tel-Tek,,,,,,,,,,,Now part of SINTEF -99,tfou.no,&explicit/1-tfouno,tfouno,No Members,No,Trøndelag Forskning og Utvikling,,,,,,,,https://ror.org/01hw8wm79,Trøndelag Forskning og Utvikling (Norway),TFOU,Now part of SINTEF -100,tisip.no,&explicit/1-tisipno,tisipno,No Members,No,TISIP,,,,,,,,,,, -101,tmforsk.no,&explicit/1-tmforskno,tmforskno,No Members,No,Telemark Research Institute,,,,,,,,https://ror.org/02jjgkb92,Telemark Research Institute,TRI, -102,toi.no,&explicit/1-toino,toino,No Members,No,Institute of Transport Economics,,,,,,,,https://ror.org/04p2pa451,Institute of Transport Economics,TØI, -103,treteknisk.no,&explicit/1-tretekniskno,tretekniskno,No Members,No,Norwegian Institute of Wood Technology,,,,,,,,https://ror.org/00d5qfz16,Norwegian Institute of Wood Technology,NTI, -104,uni.no,&explicit/1-unino,unino,No Members,No,Uni Research,,,,,,,,https://ror.org/016tr2j79,Uni Research (Norway),,Now part of NORCE Norwegian Research Centre -105,vestforsk.no,&explicit/1-vestforskno,vestforskno,No Members,No,Western Norway Research Institute,,,,,,,,https://ror.org/04z1q2j11,Vestlandsforsking,WRNI, -106,westerdals.no,&explicit/1-westerdalsno,westerdalsno,No Members,No,"Westerdals Oslo School of Arts, Communication and Technology",,,,,,,,https://ror.org/02re25503,"Westerdals Oslo School of Arts, Communication and Technology",,Now part of Kristiania - +id,dvno_group_name,dvno_group_id,dvno_group_id_explicit,dvno_affiliation,dvno_abbreviation,bucketname_in_cloudian,bucketname_in_dataverseno,feide_id,feide_email,feide_name,feide_nb,feide_nn,feide_en,feide_se,ror_id,ror_name,ror_acronym,comments +1,phs.no,phsno,&explicit/1-phsno,The Norwegian Police University College,PHS,,,54,phs.no,The Norwegian Police University College,Politihøgskolen,Politihøgskulen,The Norwegian Police University College,,https://ror.org/05486d596,Norwegian Police University College,PHS, +2,uia.no,uiano,&explicit/1-uiano,University of Agder,UiA,2002-red-dataverseno-uia,cloudian-uia,55,uia.no,University of Agder,Universitetet i Agder,Universitetet i Agder,University of Agder,Agder Universitehta,https://ror.org/03x297z98,University of Agder,, +3,nifu.no,nifuno,&explicit/1-nifuno,"Nordic Institute for Studies in Innovation, Research and Education",NIFU,,,58,nifu.no,NIFU,NIFU,NIFU,NIFU,,,,, +4,hiof.no,hiofno,&explicit/1-hiofno,Østfold University College,HiØ,2002-red-dataverseno-hiof,cloudian-hiof,70,hiof.no,Østfold University College,Høgskolen i Østfold,Høgskulen i Østfold,Østfold University College,Østfold Allaskuvla,https://ror.org/04gf7fp41,Østfold University College,HiØ, +5,aho.no,ahono,&explicit/1-ahono,The Oslo School of Architecture and Design,AHO,,,93,aho.no,The Oslo School of Architecture and Design,Arkitektur- og designhøgskolen i Oslo,Arkitektur- og designhøgskulen i Oslo,The Oslo School of Architecture and Design,,,,, +6,cmi.no,cmino,&explicit/1-cmino,Chr.
Michelsen Institute,CMI,,,96,cmi.no,Chr. Michelsen Institute,Chr. Michelsens Institutt,Chr. Michelsens Institutt,Chr. Michelsen Institute,,https://ror.org/02w7rbf39,Christian Michelsen Institute,CMI, +7,mf.no,mfno,&explicit/1-mfno,"MF Norwegian School of Theology, Religion and Society",MF,2002-red-dataverseno-mf,cloudian-mf,100,mf.no,"MF Norwegian School of Theology, Religion and Society",MF vitenskapelig høyskole,MF vitenskapelig høyskole,"MF Norwegian School of Theology, Religion and Society",,https://ror.org/01qafy255,"MF Norwegian School of Theology, Religion and Society",, +8,dmmh.no,dmmhno,&explicit/1-dmmhno,Queen Maud University College,DMMH,,,103,dmmh.no,Queen Maud University College,Dronning Mauds Minne Høgskole,Dronning Mauds Minne Høgskule,Queen Maud University College,,https://ror.org/043zemc40,Queen Maud University College,DMMH, +9,nhh.no,nhhno,&explicit/1-nhhno,Norwegian School of Economics,NHH,,,119,nhh.no,Norwegian School of Economics,Norges Handelshøyskole,Norges Handelshøyskole,Norwegian School of Economics,,https://ror.org/04v53s997,Norwegian School of Economics,NHH, +10,nla.no,nlano,&explicit/1-nlano,NLA University College,NLA,,,123,nla.no,NLA University College,NLA Høgskolen,NLA Høgskolen,NLA University College,NLA Høgskolen,https://ror.org/05fdt2q64,NLA University College,, +11,npolar.no,npolarno,&explicit/1-npolarno,Norwegian Polar Institute,,,,124,,Norwegian Polar Institute,Norsk Polarinstitutt,,Norwegian Polar Institute,,https://ror.org/03avf6522,Norwegian Polar Institute,, +12,nr.no,nrno,&explicit/1-nrno,Norwegian Computing Center,NR,,,125,nr.no,Norsk Regnesentral,Norsk Regnesentral,,,,https://ror.org/02gm7te43,Norwegian Computing Center,NR, +13,sintef.no,sintefno,&explicit/1-sintefno,SINTEF,,,,131,sintef.no,SINTEF,SINTEF,SINTEF,SINTEF,,https://ror.org/01f677e56,SINTEF,, +14,samiskhs.no,samiskhsno,&explicit/1-samiskhsno,Sámi allaskuvla – Sámi University College,,,,133,samas.no,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,Sámi allaskuvla – Samisk høgskule,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,https://ror.org/028ahgk39,Sámi University of Applied Sciences,, +15,uib.no,uibno,&explicit/1-uibno,University of Bergen,UiB,2002-red-dataverseno-uib,cloudian-uib,137,uib.no,University of Bergen,Universitetet i Bergen,Universitetet i Bergen,University of Bergen,Bergen Universitehta,https://ror.org/03zga2b32,University of Bergen,, +16,uio.no,uiono,&explicit/1-uiono,University of Oslo,UiO,2002-red-dataverseno-uio,cloudian-uio,138,uio.no,University of Oslo,Universitetet i Oslo,Universitetet i Oslo,University of Oslo,Oslo Universitehta,https://ror.org/01xtthb56,University of Oslo,UiO, +17,uit.no,uitno,&explicit/1-uitno,UiT The Arctic University of Norway,UiT,2002-red-dataverseno-uit,cloudian-uit,139,uit.no,UiT The Arctic University of Norway,UiT Norges arktiske universitet,UiT Noregs arktiske universitet,UiT The Arctic University of Norway,UiT Norgga árktalaš universitehta,https://ror.org/00wge5k78,UiT The Arctic University of Norway,UiT, +18,ntnu.no,ntnuno,&explicit/1-ntnuno,Norwegian University of Science and Technology,NTNU,2002-red-dataverseno-ntnu,cloudian-ntnu,140,ntnu.no,NTNU,NTNU,,NTNU,,https://ror.org/05xg72x27,Norwegian University of Science and Technology,NTNU, +19,nina.no,ninano,&explicit/1-ninano,Norwegian Institute for Nature Research,NINA,,,144,nina.no,NINA,NINA,NINA,,,https://ror.org/04aha0598,Norwegian Institute for Nature Research,NINA, +20,ngu.no,nguno,&explicit/1-nguno,Geological Survey of 
Norway,NGU,,,145,ngu.no,Geological Survey of Norway,Norges geologiske undersøkelse,Norges geologiske undersøkelse,Geological Survey of Norway,,,,, +21,himolde.no,himoldeno,&explicit/1-himoldeno,Molde University College,HiM,,,158,himolde.no,Molde University College,Høgskolen i Molde,Høgskulen i Molde,Molde University College,Molde Allaskuvla,https://ror.org/00kxjcd28,Molde University College,HiM, +22,nb.no,nbno,&explicit/1-nbno,National Library of Norway,NB,,,160,nb.no,National Library of Norway,Nasjonalbiblioteket,Nasjonalbiblioteket,National Library of Norway,,,,, +23,uis.no,uisno,&explicit/1-uisno,University of Stavanger,UiS,2002-red-dataverseno-uis,cloudian-uis,163,uis.no,University of Stavanger,Universitetet i Stavanger,Universitetet i Stavanger,University of Stavanger,Stavanger Universitehta,https://ror.org/02qte9q33,University of Stavanger,UiS, +24,hivolda.no,hivoldano,&explicit/1-hivoldano,Volda University College,HVO,,,165,hivolda.no,Volda University College,Høgskulen i Volda,Høgskulen i Volda,Volda University College,Volda Allaskuvla,https://ror.org/01eeqzy24,Volda University College,HVO, +25,khio.no,khiono,&explicit/1-khiono,Oslo National Academy of the Arts,KhiO,,,195,khio.no,Oslo National Academy of the Arts,Kunsthøgskolen i Oslo,Kunsthøgskulen i Oslo,Oslo National Academy of the Arts,,https://ror.org/0543h9a62,Oslo National Academy of the Arts,, +26,samfunnsforskning.no,samfunnsforskningno,&explicit/1-samfunnsforskningno,Institute for Social Research,IFS,,,197,samfunnsforskning.no,Institute for Social Research,Institutt for samfunnsforskning,,Institute for Social Research,,https://ror.org/05swz5441,Institute for Social Research,IFS, +27,ldh.no,ldhno,&explicit/1-ldhno,Lovisenberg Diaconal University College,LDH,,,216,ldh.no,Lovisenberg diaconal university college,Lovisenberg diakonale høgskole,Lovisenberg diakonale høgskule,Lovisenberg diaconal university college,,https://ror.org/015rzvz05,Lovisenberg Diakonale Høgskole,LDH, +28,fhi.no,fhino,&explicit/1-fhino,Norwegian Institute of Public Health,NIPH,,,310,fhi.no,Norwegian Institute of Public Health,Folkehelseinstituttet,,Norwegian Institute of Public Health,,https://ror.org/046nvst19,Norwegian Institute of Public Health,NIPH, +29,nih.no,nihno,&explicit/1-nihno,Norwegian School of Sport Sciences,NSSS,,,323,nih.no,Norwegian School of Sport Sciences,Norges idrettshøgskole,Noregs idrettshøgskule,Norwegian School of Sport Sciences,,https://ror.org/045016w83,Norwegian School of Sport Sciences,NSSS, +30,bi.no,bino,&explicit/1-bino,BI Norwegian Business School,BI,,,324,bi.no,BI Norwegian Business School,Handelshøyskolen BI,Handelshøyskolen BI,BI Norwegian Business School,,https://ror.org/03ez40v33,BI Norwegian Business School,, +31,nmh.no,nmhno,&explicit/1-nmhno,Norwegian Academy of Music,NMH,,,325,nmh.no,Norwegian Academy of Music,Norges musikkhøgskole,Noregs musikkhøgskule,Norwegian Academy of Music,,https://ror.org/052dy9793,Norwegian Academy of Music,NMH, +32,kristiania.no,kristianiano,&explicit/1-kristianiano,Kristiania University College,,,,17007,feide.egms.no,Kristiania University College,Høyskolen Kristiania,Høyskolen Kristiania,Kristiania University College,,https://ror.org/03gss5916,Campus Kristiania,, +33,fhs.mil.no,fhsmilno,&explicit/1-fhsmilno,Norwegian Defence University College,NDUC,,,115267,mil.no,Norwegian Defence University College,Forsvarets høgskoler,Forsvarets høgskuler,Norwegian Defence University College,,https://ror.org/02vfz9j23,Norwegian Defence University College,NDUC, 
+34,ansgarskolen.no,ansgarskolenno,&explicit/1-ansgarskolenno,Ansgar University College,,,,120177,ansgarhogskole.no,Ansgar University College,Ansgar høyskole,,Ansgar University College,,https://ror.org/05y8hw592,Ansgar Bibelskole,, +35,oslomet.no,oslometno,&explicit/1-oslometno,OsloMet – Oslo Metropolitan University,HiOA,,,120186,oslomet.no,OsloMet – Oslo Metropolitan University,OsloMet – storbyuniversitetet,OsloMet – storbyuniversitetet,OsloMet – Oslo Metropolitan University,OsloMet – stuorragávpotuniversitehta,https://ror.org/04q12yn84,OsloMet – Oslo Metropolitan University,HiOA, +36,nmbu.no,nmbuno,&explicit/1-nmbuno,Norwegian University of Life Sciences,NMBU,2002-red-dataverseno-nmbu,cloudian-nmbu,1777926,nmbu.no,Norwegian University of Life Sciences,Norges miljø- og biovitenskapelige universitet,Noregs miljø- og biovitenskapelige universitet,Norwegian University of Life Sciences,,https://ror.org/04a1mvv97,Norwegian University of Life Sciences,NMBU, +37,nibio.no,nibiono,&explicit/1-nibiono,Norwegian Institute of Bioeconomy Research,NIBIO,,,2052113,nibio.no,Nibio,Nibio,,,,https://ror.org/04aah1z61,Norwegian Institute of Bioeconomy Research,NIBIO, +38,vid.no,vidno,&explicit/1-vidno,VID Specialized University,VID,2002-red-dataverseno-vid,cloudian-vid,2064538,vid.no,VID Specialized University,VID vitenskapelige høgskole,VID vitenskapelige høgskule,VID Specialized University,,https://ror.org/0191b3351,VID Specialized University,VID, +39,nord.no,nordno,&explicit/1-nordno,Nord University,,2002-red-dataverseno-nord,cloudian-nord,2066644,nord.no,Nord University,Nord universitet,Nord universitet,Nord University,,https://ror.org/030mwrt98,Nord University,, +40,usn.no,usnno,&explicit/1-usnno,University of South-Eastern Norway,USN,,,2066647,usn.no,University of South-Eastern Norway,Universitetet i Sørøst-Norge,Universitetet i Søraust-Noreg,University of South-Eastern Norway,,https://ror.org/05ecg5h20,University of South-Eastern Norway,USN, +41,hvl.no,hvlno,&explicit/1-hvlno,Western Norway University of Applied Sciences,HVL,2002-red-dataverseno-hvl,cloudian-hvl,2126357,hvl.no,Western Norway University of Applied Sciences,Høgskulen på Vestlandet,Høgskulen på Vestlandet,Western Norway University of Applied Sciences,,https://ror.org/05phns765,Western Norway University of Applied Sciences,HVL, +42,nkvts.no,nkvtsno,&explicit/1-nkvtsno,Norwegian centre for violence and traumatic stress studies,NKVTS,,,2127917,nkvts.no,Norwegian centre for violence and traumatic stress studies,Nasjonalt kunnskapssenter om vold og traumatisk stress,,Norwegian centre for violence and traumatic stress studies,,https://ror.org/01p618c36,Norwegian Centre for Violence and Traumatic Stress Studies,NKVTS, +43,inn.no,innno,&explicit/1-innno,Inland Norway University of Applied Sciences,,2002-red-dataverseno-inn,cloudian-inn,2128215,inn.no,Inland Norway University of Applied Sciences,Høgskolen i Innlandet,,Inland Norway University of Applied Sciences,,https://ror.org/02dx4dc92,Inland Norway University of Applied Sciences,, +44,vetinst.no,vetinstno,&explicit/1-vetinstno,Norwegian Veterinary Institute,NVI,,,2217125,vetinst.no,Veterinærinstituttet,Veterinærinstituttet,,,,https://ror.org/05m6y3182,Norwegian Veterinary Institute,NVI, +45,nubu.no,nubuno,&explicit/1-nubuno,NUBU - The Norwegian Center for Child Behavioral Development,NUBU,,,2217221,nubu.no,NUBU - The Norwegian Center for Child Behavioral Development,NUBU - Nasjonalt utviklingssenter for barn og unge,,NUBU - The Norwegian Center for Child Behavioral Development,,,,, 
+46,hlsenteret.no,hlsenteretno,&explicit/1-hlsenteretno,The Norwegian Center for Holocaust and Minority Studies,,,,2217222,hlsenteret.no,The Norwegian Center for Holocaust and Minority Studies,Senter for studier av Holocaust og livssynsminoriteter,,The Norwegian Center for Holocaust and Minority Studies,,https://ror.org/03ppkyp25,Center for Studies of Holocaust and Religious Minorities,, +47,met.no,metno,&explicit/1-metno,Norwegian Meteorological Institute,MET,,,2217341,,Meteorologisk Institutt,Meteorologisk Institutt,,,,https://ror.org/001n36p86,Norwegian Meteorological Institute,MET, +48,simula.no,simulano,&explicit/1-simulano,Simula Research Laboratory,,,,2217477,simula.no,Simula,Simula,,,,https://ror.org/00vn06n10,Simula Research Laboratory,, +49,agderforskning.no,agderforskningno,&explicit/1-agderforskningno,Agder Research,,,,,,,,,,,https://ror.org/02k3w5n89,Agder Research,, +50,akvaplan.niva.no,akvaplannivano,&explicit/1-akvaplannivano,Akvaplan-niva,,,,,,,,,,,https://ror.org/03nrps502,Akvaplan-niva (Norway),, +51,arbark.no,arbarkno,&explicit/1-arbarkno,Norwegian Labour Movement Archives and Library,,,,,,,,,,,https://ror.org/05x91m338,Norwegian Labour Movement Archives and Library,, +52,cas.oslo.no,casoslono,&explicit/1-casoslono,Centre for Advanced Study,CAS,,,,,,,,,,https://ror.org/05rbhea42,Centre for Advanced Study,CAS, +53,cicero.oslo.no,cicerooslono,&explicit/1-cicerooslono,CICERO Center for International Climate Research,CICERO,,,,,,,,,,https://ror.org/01gw5dy53,CICERO Center for International Climate Research,CICERO, +54,cmr.no,cmrno,&explicit/1-cmrno,Christian Michelsen Research,CMR,,,,,,,,,,,,,Now part of NORCE Norwegian Research Centre +55,dataverse.no,dataverseno,&explicit/1-dataverseno,,,2002-red-dataverseno-dvno,cloudian-dvno,,,,,,,,,,,The storage bucket 2002-red-dataverseno-dvno / cloudian-dvno is used for the root/top collection. 
+56,DataverseNO Admin,DataverseNOAdmin,&explicit/1-DataverseNOAdmin,,,,,,,,,,,,,,, +57,DataverseNO Curator,DvNOCurator,&explicit/1-DvNOCurator,,,,,,,,,,,,,,, +58,DataverseNO Dataset Creator,DataverseNODatasetCreator,&explicit/1-DataverseNODatasetCreator,,,,,,,,,,,,,,, +59,diakonova.no,diakonovano,&explicit/1-diakonovano,Diakonova,,,,,,,,,,,,,, +60,fafo.no,fafono,&explicit/1-fafono,Fafo Foundation,,,,,,,,,,,https://ror.org/00ee9xb13,Fafo Foundation,, +61,ffi.no,ffino,&explicit/1-ffino,Norwegian Defence Research Establishment,FFI,,,,,,,,,,https://ror.org/0098gnz32,Norwegian Defence Research Establishment,FFI, +62,flymed.no,flymedno,&explicit/1-flymedno,Flymedisinsk institutt,,,,,,,,,,,,,, +63,fni.no,fnino,&explicit/1-fnino,Fridtjof Nansen Institute,FNI,,,,,,,,,,https://ror.org/04ep2t954,Fridtjof Nansen Institute,FNI, +64,genok.no,genokno,&explicit/1-genokno,GenØk – Centre for Biosafety,GenØk,,,,,,,,,,https://ror.org/027arfy53,GenØk,, +65,hi.no,hino,&explicit/1-hino,Norwegian Institute of Marine Research,IMR,,,,,,,,,,https://ror.org/05vg74d16,Norwegian Institute of Marine Research,IMR, +66,ife.no,ifeno,&explicit/1-ifeno,Institute for Energy Technology,IFE,,,,,,,,,,https://ror.org/02jqtg033,Institute for Energy Technology,IFE, +67,iris.no,irisno,&explicit/1-irisno,International Research Institute of Stavanger,IRIS,,,,,,,,,,https://ror.org/0502t5s28,International Research Institute of Stavanger,IRIS,Now part of NORCE Norwegian Research Centre +68,kifo.no,kifono,&explicit/1-kifono,"Institute for Church, Religion, and Worldview Research",KIFO,,,,,,,,,,https://ror.org/051p4t773,"Institute for Church, Religion, and Worldview Research",KIFO, +69,kreftregisteret.no,kreftregisteretno,&explicit/1-kreftregisteretno,Cancer Registry of Norway,CRN,,,,,,,,,,https://ror.org/03sm1ej59,Cancer Registry of Norway,CRN, +70,legeforeningen.no,legeforeningenno,&explicit/1-legeforeningenno,Den norske legeforening,,,,,,,,,,,,,, +71,moreforsk.no,moreforskno,&explicit/1-moreforskno,Møreforsking,,,,,,,,,,,https://ror.org/02w4kss89,Møreforsking (Norway),, +72,nersc.no,nerscno,&explicit/1-nerscno,Nansen Environmental and Remote Sensing Center,,,,,,,,,,,,,, +73,nfms.no,nfmsno,&explicit/1-nfmsno,Aeromedical Center of Norway,,,,,,,,,,,,,, +74,nforsk.no,nforskno,&explicit/1-nforskno,Nordland Research Institute,,,,,,,,,,,https://ror.org/02wvb2a30,Nordland Research Institute,, +75,ngi.no,ngino,&explicit/1-ngino,Norwegian Geotechnical Institute,NGI,,,,,,,,,,https://ror.org/032ksge37,Norwegian Geotechnical Institute,NGI, +76,niku.no,nikuno,&explicit/1-nikuno,Norwegian Institute for Cultural Heritage Research,NIKU,,,,,,,,,,https://ror.org/02xhrye98,Norwegian Institute for Cultural Heritage Research,NIKU, +77,nilu.no,niluno,&explicit/1-niluno,Norwegian Institute for Air Research,NILU,,,,,,,,,,https://ror.org/00q7d9z06,Norwegian Institute for Air Research,NILU, +78,niva.no,nivano,&explicit/1-nivano,Norwegian Institute for Water Research,NIVA,,,,,,,,,,https://ror.org/03hrf8236,Norwegian Institute for Water Research,NIVA, +79,nlr.no,nlrno,&explicit/1-nlrno,Norsk Landbruksrådgiving,NLR,,,,,,,,,,https://ror.org/03c1zct07,Norsk Landbruksrådgiving,NLR, +80,nobel.no,nobelno,&explicit/1-nobelno,Norwegian Nobel Institute,,,,,,,,,,,https://ror.org/055wgnw59,Norwegian Nobel Institute,, +81,nofima.no,nofimano,&explicit/1-nofimano,Nofima,,2002-red-dataverseno-nofi,cloudian-nofima,,,,,,,,https://ror.org/02v1rsx93,Nofima,, +82,norceresearch.no,norceresearchno,&explicit/1-norceresearchno,Norwegian Research 
Centre,NORCE,,,,,,,,,,https://ror.org/02gagpf75,Norwegian Research Centre,NORCE, +83,norsar.no,norsarno,&explicit/1-norsarno,Norwegian Seismic Array,,,,,,,,,,,https://ror.org/02vw8cm83,Norsar,, +84,norsok.no,norsokno,&explicit/1-norsokno,Norsk senter for økologisk landbruk,,,,,,,,,,,,,, +85,norsus.no,norsusno,&explicit/1-norsusno,Norwegian Institute for Sustainability Research,NORSUS,,,,,,,,,,,,, +86,norut.no,norutno,&explicit/1-norutno,Norut Northern Research Institute,Norut,,,,,,,,,,,,,Now part of NORCE Norwegian Research Centre +87,nupi.no,nupino,&explicit/1-nupino,Norwegian Institute of International Affairs,NUPI,,,,,,,,,,https://ror.org/01pznaa94,Norwegian Institute of International Affairs,NUPI, +88,ostfoldforskning.no,ostfoldforskningno,&explicit/1-ostfoldforskningno,Ostfold Research,,,,,,,,,,,https://ror.org/01vmqaq17,Ostfold Research (Norway),,Has changed name to +89,ostforsk.no,ostforskno,&explicit/1-ostforskno,Eastern Norway Research Institute,ENRI,,,,,,,,,,https://ror.org/020deqr47,Eastern Norway Research Institute,ENRI, +90,pfi.no,pfino,&explicit/1-pfino,Paper and Fibre Research Institute,PFI,,,,,,,,,,https://ror.org/053qb6g74,Paper and Fibre Research Institute,PFI, +91,prio.org,prioorg,&explicit/1-prioorg,Peace Research Institute,PRIO,,,,,,,,,,https://ror.org/04dx54y73,Peace Research Institute,PRIO, +92,risefr.no,risefrno,&explicit/1-risefrno,RISE Fire Research,RISE,,,,,,,,,,,,, +93,ruralis.no,ruralisno,&explicit/1-ruralisno,Institute for Rural and Regional Research,CRR,,,,,,,,,,https://ror.org/0169gd037,Centre for Rural Research,CRR,Note: The ROR entry is not up to date. +94,sik.no,sikno,&explicit/1-sikno,Centre for Intercultural Communication,SIK,,,,,,,,,,,,,Now part of VID +95,snf.no,snfno,&explicit/1-snfno,Centre for Applied Research,,,,,,,,,,,,,, +96,stami.no,stamino,&explicit/1-stamino,National Institute of Occupational Health,NIOH,,,,,,,,,,https://ror.org/04g3t6s80,National Institute of Occupational Health,NIOH, +97,teknova.no,teknovano,&explicit/1-teknovano,Teknova,,,,,,,,,,,https://ror.org/02ekw8p73,Teknova,,Now part of NORCE Norwegian Research Centre +98,tel-tek.no,tel-tekno,&explicit/1-tel-tekno,Tel-Tek,,,,,,,,,,,,,,Now part of SINTEF +99,tfou.no,tfouno,&explicit/1-tfouno,Trøndelag Forskning og Utvikling,TFOU,,,,,,,,,,https://ror.org/01hw8wm79,Trøndelag Forskning og Utvikling (Norway),TFOU,Now part of SINTEF +100,tisip.no,tisipno,&explicit/1-tisipno,TISIP,,,,,,,,,,,,,, +101,tmforsk.no,tmforskno,&explicit/1-tmforskno,Telemark Research Institute,TRI,,,,,,,,,,https://ror.org/02jjgkb92,Telemark Research Institute,TRI, +102,toi.no,toino,&explicit/1-toino,Institute of Transport Economics,TØI,,,,,,,,,,https://ror.org/04p2pa451,Institute of Transport Economics,TØI, +103,treteknisk.no,tretekniskno,&explicit/1-tretekniskno,Norwegian Institute of Wood Technology,NTI,,,,,,,,,,https://ror.org/00d5qfz16,Norwegian Institute of Wood Technology,NTI, +104,uni.no,unino,&explicit/1-unino,Uni Research,,,,,,,,,,,https://ror.org/016tr2j79,Uni Research (Norway),,Now part of NORCE Norwegian Research Centre +105,vestforsk.no,vestforskno,&explicit/1-vestforskno,Western Norway Research Institute,WRNI,,,,,,,,,,https://ror.org/04z1q2j11,Vestlandsforsking,WRNI, +106,westerdals.no,westerdalsno,&explicit/1-westerdalsno,"Westerdals Oslo School of Arts, Communication and Technology",,,,,,,,,,,https://ror.org/02re25503,"Westerdals Oslo School of Arts, Communication and Technology",,Now part of Kristiania +107,unn.no,unnno,&explicit/1-unnno,University Hospital of North 
Norway,UNN,,,,,,,,,,https://ror.org/030v5kp38,University Hospital of North Norway,UNN, +108,helse-vest.no,helse-vestno,&explicit/1-helse-vestno,Western Norway Regional Health Authority,,,,,,,,,,,https://ror.org/001212e83,Western Norway Regional Health Authority,, +109,helse-forde.no,helse-fordeno,&explicit/1-helse-fordeno,Helse Førde,,,,,,,,,,,https://ror.org/05dzsmt79,Helse Førde,, +110,helse-bergen.no,helse-bergenno,&explicit/1-helse-bergenno,Helse Bergen,,,,,,,,,,,,,, +111,helse-fonna.no,helse-fonnano,&explicit/1-helse-fonnano,Helse Fonna,,,,,,,,,,,,,, +112,sus.no,susno,&explicit/1-susno,Stavanger University Hospital,SUS,,,,,,,,,,https://ror.org/04zn72g03,Stavanger University Hospital,SUS, +113,helse-midt.no,helse-midtno,&explicit/1-helse-midtno,Central Norway Regional Health Authority,,,,,,,,,,,https://ror.org/04t838f48,Central Norway Regional Health Authority,, +114,helse-mr.no,helse-mrno,&explicit/1-helse-mrno,Helse Møre og Romsdal,,,,,,,,,,,https://ror.org/05ka2ew29,Helse Møre og Romsdal,, +115,stolav.no,stolavno,&explicit/1-stolavno,St Olav's University Hospital,,,,,,,,,,,https://ror.org/01a4hbq44,St Olav's University Hospital,, +116,hnt.no,hntno,&explicit/1-hntno,Helse Nord-Trøndelag,,,,,,,,,,,,,, +117,helse-nord.no,helse-nordno,&explicit/1-helse-nordno,Northern Norway Regional Health Authority,,,,,,,,,,,https://ror.org/05f6c0c45,Northern Norway Regional Health Authority,, +118,helgelandssykehuset.no,helgelandssykehusetno,&explicit/1-helgelandssykehusetno,Helgelandssykehuset,,,,,,,,,,,,,, +119,finnmarkssykehuset.no,finnmarkssykehusetno,&explicit/1-finnmarkssykehusetno,Finnmarkssykehuset,,,,,,,,,,,https://ror.org/04z1ebj23,Finnmarkssykehuset,, +120,nordlandssykehuset.no,nordlandssykehusetno,&explicit/1-nordlandssykehusetno,Nordland Hospital Trust,,,,,,,,,,,https://ror.org/04wjd1a07,Nordland Hospital Trust,, +121,helse-sorost.no,helse-sorostno,&explicit/1-helse-sorostno,Southern and Eastern Norway Regional Health Authority,,,,,,,,,,,https://ror.org/02qx2s478,Southern and Eastern Norway Regional Health Authority,, +122,ahus.no,ahusno,&explicit/1-ahusno,Akershus University Hospital,Ahus,,,,,,,,,,https://ror.org/0331wat71,Akershus University Hospital,, +123,oslo-universitetssykehus.no,oslo-universitetssykehusno,&explicit/1-oslo-universitetssykehusno,Oslo University Hospital,,,,,,,,,,,https://ror.org/00j9c2840,Oslo University Hospital,, From 3b89cf712d39d8f0edfd883956a33ef1088cb1c6 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 12 Jul 2022 09:11:29 +0000 Subject: [PATCH 014/354] customization xhtml --- .../modification/Bundle.properties | 2681 +++++++++++++++++ .../dataverse.no/modification/analytics.xhtml | 23 + .../modification/dataverse_footer.xhtml | 92 + .../modification/dataverse_header.xhtml | 407 +++ .../dataverse.no/modification/loginpage.xhtml | 238 ++ 5 files changed, 3441 insertions(+) create mode 100644 distros/dataverse.no/modification/Bundle.properties create mode 100644 distros/dataverse.no/modification/analytics.xhtml create mode 100644 distros/dataverse.no/modification/dataverse_footer.xhtml create mode 100644 distros/dataverse.no/modification/dataverse_header.xhtml create mode 100644 distros/dataverse.no/modification/loginpage.xhtml diff --git a/distros/dataverse.no/modification/Bundle.properties b/distros/dataverse.no/modification/Bundle.properties new file mode 100644 index 0000000..6266510 --- /dev/null +++ b/distros/dataverse.no/modification/Bundle.properties @@ -0,0 +1,2681 @@ +dataverse=Dataverse +newDataverse=New 
Dataverse +hostDataverse=Host Dataverse +dataverses=Dataverses +passwd=Password +dataset=Dataset +datasets=Datasets +newDataset=New Dataset +files=Files +file=File +public=Public +restricted=Restricted +restrictedaccess=Restricted with Access Granted +find=Find +search=Search +language=Language +created=Created +deposited=Deposited +published=Published +unpublished=Unpublished +cancel=Cancel +ok=OK +saveChanges=Save Changes +acceptTerms=Accept +submit=Submit +signup=Sign Up +login=Log In +email=Email +account=Account +requiredField=Required field +new=New +identifier=Identifier +description=Description +subject=Subject +close=Close +preview=Preview +continue=Continue +name=Name +institution=Institution +position=Position +affiliation=Affiliation +storage=Storage +createDataverse=Create Dataverse +remove=Remove +done=Done +editor=Contributor +manager=Manager +curator=Curator +explore=Explore +download=Download +downloadOriginal=Original Format +downloadArchival=Archival Format (.tab) +deaccession=Deaccession +share=Share +link=Link +linked=Linked +harvested=Harvested +apply=Apply +add=Add +delete=Delete +copyClipboard=Copy to Clipboard +truncateMoreBtn=Read full {0} [+] +truncateMoreTip=Click to read the full {0}. +truncateLessBtn=Collapse {0} [+] +truncateLessTip=Click to collapse the {0}. +yes=Yes +no=No +previous=Previous +next=Next +first=First +last=Last +more=More... +less=Less... +select=Select... +selectedFiles=Selected Files +htmlAllowedTitle=Allowed HTML Tags +htmlAllowedMsg=This field supports only certain HTML tags. +htmlAllowedTags=<a>, <b>, <blockquote>, <br>, <code>, <del>, <dd>, <dl>, <dt>, <em>, <hr>, <h1>-<h3>, <i>, <img>, <kbd>, <li>, <ol>, <p>, <pre>, <s>, <sup>, <sub>, <strong>, <strike>, <u>, <ul> +conditionalRequiredMsg=One or more of these fields may become required if you add to one or more of these optional fields. +conditionalRequiredMsg.tooltip=This field will become required if you choose to enter values in one or more of these optional fields. +toggleNavigation=Toggle navigation +defaultBody=Default Body +loading=Loading... +filter=Filter +to=to +of=of +alt.logo={0} logo +alt.homepage={0} homepage + +# dataverse_header.xhtml +header.noscript=Please enable JavaScript in your browser. It is required to use most of the features of Dataverse. +header.status.header=Status +header.search.title=Search all dataverses... +header.about=About +header.support=Support +header.guides=Guides +header.guides.user=User Guide +header.guides.developer=Developer Guide +header.guides.installation=Installation Guide +header.guides.api=API Guide +header.guides.admin=Admin Guide +header.signUp=Sign Up +header.logOut=Log Out +header.accountInfo=Account Information +header.dashboard=Dashboard +header.user.selectTab.dataRelated=My Data +header.user.selectTab.notifications=Notifications +header.user.selectTab.accountInfo=Account Information +header.user.selectTab.groupsAndRoles=Groups + Roles +header.user.selectTab.apiToken=API Token + +# dataverse_template.xhtml +head.meta.description=The Dataverse Project is an open source software application to share, cite and archive data. Dataverse provides a robust infrastructure for data stewards to host and archive data, while offering researchers an easy way to share and get credit for their data. +body.skip=Skip to main content + +# dataverse_footer.xhtml +footer.copyright=Copyright © {0} +footer.widget.datastored=Data is stored at {0}. 
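The header and footer keys above are ordinary Java resource-bundle strings; the {0}, {1}, ... markers are MessageFormat placeholders filled in at render time. A minimal sketch for checking which Bundle.properties a running container is actually serving, assuming the container name "dataverse" and the stock Payara layout (both assumptions, not shown in this patch):

# Print the deployed value of one key (container name and path are assumed):
APP=/opt/payara/appserver/glassfish/domains/domain1/applications/dataverse
docker exec dataverse grep -m1 '^footer.widget.datastored' \
    "$APP/WEB-INF/classes/propertyFiles/Bundle.properties"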
+footer.widget.login=Log in to +footer.privacyPolicy=Privacy Policy +footer.poweredby=Powered by +footer.dataverseProject=The Dataverse Project + +# messages.xhtml +messages.error=Error +messages.success=Success! +messages.info=Info +messages.validation=Validation Error +messages.validation.msg=Required fields were missed or there was a validation error. Please scroll down to see details. + +# contactFormFragment.xhtml +contact.header=Contact {0} +contact.dataverse.header=Email Dataverse Contact +contact.dataset.header=Email Dataset Contact +contact.to=To +contact.support=Support +contact.from=From +contact.from.required=User email is required. +contact.from.invalid=Email is invalid. +contact.subject=Subject +contact.subject.required=Subject is required. +contact.subject.selectTab.top=Select subject... +contact.subject.selectTab.support=Support Question +contact.subject.selectTab.dataIssue=Data Issue +contact.msg=Message +contact.msg.required=Message text is required. +contact.send=Send Message +contact.question=Please fill this out to prove you are not a robot. +contact.sum.title=Human Access Validation Answer +contact.sum.required=Value is required. +contact.sum.invalid=Incorrect sum, please try again. +contact.sum.converterMessage=Please enter a number. +contact.contact=Contact +# Bundle file editors, please note that these "contact.context" messages are used in tests. +contact.context.subject.dvobject={0} contact: {1} +contact.context.subject.support={0} support request: {1} +contact.context.dataverse.intro={0}You have just been sent the following message from {1} via the {2} hosted dataverse named "{3}":\n\n---\n\n +contact.context.dataverse.ending=\n\n---\n\n{0}\n{1}\n\nGo to dataverse {2}/dataverse/{3}\n\nYou received this email because you have been listed as a contact for the dataverse. If you believe this was an error, please contact {4} at {5}. To respond directly to the individual who sent the message, simply reply to this email. +contact.context.dataverse.noContact=There is no contact address on file for this dataverse so this message is being sent to the system address.\n\n +contact.context.dataset.greeting.helloFirstLast=Hello {0} {1}, +contact.context.dataset.greeting.organization=Attention Dataset Contact: +contact.context.dataset.intro={0}\n\nYou have just been sent the following message from {1} via the {2} hosted dataset titled "{3}" ({4}):\n\n---\n\n +contact.context.dataset.ending=\n\n---\n\n{0}\n{1}\n\nGo to dataset {2}/dataset.xhtml?persistentId={3}\n\nYou received this email because you have been listed as a contact for the dataset. If you believe this was an error, please contact {4} at {5}. To respond directly to the individual who sent the message, simply reply to this email. +contact.context.dataset.noContact=There is no contact address on file for this dataset so this message is being sent to the system address.\n\n---\n\n +contact.context.file.intro={0}\n\nYou have just been sent the following message from {1} via the {2} hosted file named "{3}" from the dataset titled "{4}" ({5}):\n\n---\n\n +contact.context.file.ending=\n\n---\n\n{0}\n{1}\n\nGo to file {2}/file.xhtml?fileId={3}\n\nYou received this email because you have been listed as a contact for the dataset. If you believe this was an error, please contact {4} at {5}. To respond directly to the individual who sent the message, simply reply to this email. 
+contact.context.support.intro={0},\n\nThe following message was sent from {1}.\n\n---\n\n
+contact.context.support.ending=\n\n---\n\nMessage sent from Support contact form.
+
+# dataverseuser.xhtml
+account.info=Account Information
+account.edit=Edit Account
+account.apiToken=API Token
+account.emailvalidation.header=Email Validation
+account.emailvalidation.token.exists=A verification email has been sent to {0}. Please check your inbox.
+user.isShibUser=Account information cannot be edited when logged in through an institutional account.
+user.helpShibUserMigrateOffShibBeforeLink=Leaving your institution? Please contact
+user.helpShibUserMigrateOffShibAfterLink=for assistance.
+user.helpOAuthBeforeLink=Your Dataverse account uses {0} for login. If you are interested in changing login methods, please contact
+user.helpOAuthAfterLink=for assistance.
+user.lostPasswdTip=If you have lost or forgotten your password, please enter your username or email address below and click Submit. We will send you an e-mail with your new password.
+user.dataRelatedToMe=My Data
+wasCreatedIn=, was created in
+wasCreatedTo=, was added to
+wasSubmittedForReview=, was submitted for review to be published in
+wasPublished=, was published in
+wasReturnedByReviewer=, was returned by the curator of
+# TODO: Confirm that "toReview" can be deleted.
+toReview=Don't forget to publish it or send it back to the contributor!
+# Bundle file editors, please note that "notification.welcome" is used in a unit test.
+#notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address.
+#UB
+notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check our user guides at http://site.uit.no/dataverseno/deposit/ or contact DataverseNO support for assistance: http://site.uit.no/dataverseno/support/.
+
+notification.demoSite=Demo Site
+notification.requestFileAccess=File access requested for dataset: {0}, made by {1} ({2}).
+notification.grantFileAccess=Access granted for files in dataset: {0}.
+notification.rejectFileAccess=Access rejected for requested files in dataset: {0}.
+notification.createDataverse={0} was created in {1}. To learn more about what you can do with your dataverse, check out the {2}.
+notification.dataverse.management.title=Dataverse Management - Dataverse User Guide
+notification.createDataset={0} was created in {1}. To learn more about what you can do with a dataset, check out the {2}.
+notification.dataset.management.title=Dataset Management - Dataset User Guide
+notification.wasSubmittedForReview={0} was submitted for review to be published in {1}. Don''t forget to publish it or send it back to the contributor, {2} ({3})\!
+notification.wasReturnedByReviewer={0} was returned by the curator of {1}.
+notification.wasPublished={0} was published in {1}.
+notification.publishFailedPidReg={0} in {1} could not be published due to a failure to register or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen.
+notification.workflowFailed=An external workflow run on {0} in {1} has failed. Check your email and/or view the Dataset page which may have additional details. Contact support if this continues to happen.
+notification.workflowSucceeded=An external workflow run on {0} in {1} has succeeded. Check your email and/or view the Dataset page which may have additional details.
+
+notification.ingestCompleted=Dataset {1} ingest has successfully finished.
+notification.ingestCompletedWithErrors=Dataset {1} ingest has finished with errors.
+notification.generic.objectDeleted=The dataverse, dataset, or file for this notification has been deleted.
+notification.access.granted.dataverse=You have been granted the {0} role for {1}.
+notification.access.granted.dataset=You have been granted the {0} role for {1}.
+notification.access.granted.datafile=You have been granted the {0} role for file in {1}.
+notification.access.granted.fileDownloader.additionalDataverse={0} You now have access to all published restricted and unrestricted files in this dataverse.
+notification.access.granted.fileDownloader.additionalDataset={0} You now have access to all published restricted and unrestricted files in this dataset.
+notification.access.revoked.dataverse=You have been removed from a role in {0}.
+notification.access.revoked.dataset=You have been removed from a role in {0}.
+notification.access.revoked.datafile=You have been removed from a role in {0}.
+notification.checksumfail=One or more files in your upload failed checksum validation for dataset {1}. Please re-run the upload script. If the problem persists, please contact support.
+notification.ingest.completed=Dataset {2} ingest process has successfully finished.<br/><br/>Ingested files:{3}
+notification.ingest.completedwitherrors=Dataset {2} ingest process has finished with errors.<br/><br/>Ingested files:{3}
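+# Note: the notification.* values above are java.text.MessageFormat patterns:
+# {n} marks a positional argument, and a literal apostrophe must be doubled
+# (hence "Don''t"). A minimal sketch of how such a value is rendered (the
+# bundle lookup and arguments are illustrative, not the exact Dataverse code):
+#
+#   import java.text.MessageFormat;
+#   import java.util.ResourceBundle;
+#
+#   ResourceBundle bundle = ResourceBundle.getBundle("Bundle");
+#   String msg = MessageFormat.format(
+#       bundle.getString("notification.wasSubmittedForReview"),
+#       "My Dataset", "Root", "Jane Doe", "jane@example.org");
+#   // -> "My Dataset was submitted for review to be published in Root.
+#   //     Don't forget to publish it or send it back to the contributor,
+#   //     Jane Doe (jane@example.org)!"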
+notification.mail.import.filesystem=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded and verified. +notification.import.filesystem=Dataset {1} has been successfully uploaded and verified. +notification.import.checksum={1}, dataset had file checksums added via a batch job. +removeNotification=Remove Notification +groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. +user.message.signup.label=Create Account +user.message.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files. +user.signup.otherLogInOptions.tip=You can also create a Dataverse account with one of our other log in options. +user.username.illegal.tip=Between 2-60 characters, and can use "a-z", "0-9", "_" for your username. +user.username=Username +user.username.taken=This username is already taken. +user.username.invalid=This username contains an invalid character or is outside the length requirement (2-60 characters). +user.username.valid=Create a valid username of 2 to 60 characters in length containing letters (a-Z), numbers (0-9), dashes (-), underscores (_), and periods (.). +user.noPasswd=No Password +user.currentPasswd=Current Password +user.currentPasswd.tip=Please enter the current password for this account. +user.passwd.illegal.tip=Password needs to be at least 6 characters, include one letter and one number, and special characters may be used. +user.rePasswd=Retype Password +user.rePasswd.tip=Please retype the password you entered above. +user.firstName=Given Name +user.firstName.tip=The first name or name you would like to use for this account. +user.lastName=Family Name +user.lastName.tip=The last name you would like to use for this account. +user.email.tip=A valid email address you have access to in order to be contacted. +user.email.taken=This email address is already taken. +user.affiliation.tip=The organization with which you are affiliated. +user.position=Position +user.position.tip=Your role or title at the organization you are affiliated with; such as staff, faculty, student, etc. +user.acccountterms=General Terms of Use +user.acccountterms.tip=The terms and conditions for using the application and services. +user.acccountterms.required=Please check the box to indicate your acceptance of the General Terms of Use. +user.acccountterms.iagree=I have read and accept the Dataverse General Terms of Use as outlined above. +user.createBtn=Create Account +user.updatePassword.welcome=Welcome to Dataverse {0} +user.updatePassword.warning=With the release of our new Dataverse 4.0 upgrade, the password requirements and General Terms of Use have updated. As this is the first time you are using Dataverse since the update, you need to create a new password and agree to the new General Terms of Use. +user.updatePassword.password={0} +user.password=Password +user.newPassword=New Password +authenticationProvidersAvailable.tip={0}There are no active authentication providers{1}If you are a system administrator, please enable one using the API.{2}If you are not a system administrator, please contact the one for your institution. 
+
+passwdVal.passwdReq.title=Your password must contain:
+passwdVal.passwdReq.goodStrength=passwords of at least {0} characters are exempt from all other requirements
+passwdVal.passwdReq.lengthReq=At least {0} characters
+passwdVal.passwdReq.characteristicsReq=At least 1 character from {0} of the following types:
+passwdVal.passwdReq.notInclude=It may not include:
+passwdVal.passwdReq.consecutiveDigits=More than {0} numbers in a row
+passwdVal.passwdReq.dictionaryWords=Dictionary words
+passwdVal.passwdReq.unknownPasswordRule=Unknown, contact your administrator
+#printf syntax used to pass to passay library
+passwdVal.expireRule.errorCode=EXPIRED
+passwdVal.expireRule.errorMsg=The password is over %1$s days old and has expired.
+passwdVal.goodStrengthRule.errorMsg=Note: passwords are always valid with a %1$s or more character length regardless.
+passwdVal.goodStrengthRule.errorCode=NO_GOODSTRENGTH
+passwdVal.passwdReset.resetLinkTitle=Password Reset Link
+passwdVal.passwdReset.resetLinkDesc=Your password reset link is not valid
+passwdVal.passwdReset.resetInitiated=Password Reset Initiated
+passwdVal.passwdReset.valBlankLog=new password is blank
+passwdVal.passwdReset.valFacesError=Password Error
+passwdVal.passwdReset.valFacesErrorDesc=Please enter a new password for your account.
+passwdVal.passwdValBean.warnDictionaryRead=Dictionary was set, but none was read in.
+passwdVal.passwdValBean.warnDictionaryObj=PwDictionaries not set and no default password file found:
+passwdVal.passwdValBean.warnSetStrength=The PwGoodStrength {0} value competes with the PwMinLength value of {1} and is added to {2}
+
+# passwordreset.xhtml
+pageTitle.passwdReset.pre=Account Password Reset
+passwdReset.token=token :
+passwdReset.userLookedUp=user looked up :
+passwdReset.emailSubmitted=email submitted :
+passwdReset.details={0} Password Reset{1} - To initiate the password reset process, please provide your email address.
+passwdReset.submitRequest=Submit Password Request
+passwdReset.successSubmit.tip=If this email is associated with an account, then an email will be sent with further instructions to {0}.
+passwdReset.debug=DEBUG
+passwdReset.resetUrl=The reset URL is
+passwdReset.noEmail.tip=No email was actually sent because a user could not be found using the provided email address {0} but we don't mention this because we don't want malicious users to use the form to determine if there is an account associated with an email address.
+passwdReset.illegalLink.tip=Your password reset link is not valid. If you need to reset your password, {0}click here{1} in order to request that your password be reset again.
+passwdReset.newPasswd.details={0} Reset Password{1} \u2013 Our password requirements have changed. Please pick a strong password that matches the criteria below.
+passwdReset.newPasswd=New Password
+passwdReset.rePasswd=Retype Password
+passwdReset.resetBtn=Reset Password
+
+#loginpage.xhtml
+login.System=Login System
+login.forgot.text=Forgot your password?
+login.builtin=Dataverse Account
+login.institution=Institutional Account
+#login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
+#UB
+login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
+
+
+#login.institution.support.blurbwithLink=Leaving your institution? Please contact {0} for assistance.
+#UB
+login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance.
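+# Note: unlike the MessageFormat strings elsewhere in this bundle, the
+# passwdVal.* messages above use printf-style placeholders (%1$s) because
+# they are handed to the passay password-validation library. A sketch of the
+# substitution (the lookup wiring is assumed, not Dataverse's exact code):
+#
+#   String template = bundle.getString("passwdVal.expireRule.errorMsg");
+#   String rendered = String.format(template, 365);
+#   // -> "The password is over 365 days old and has expired."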
+ + +login.builtin.credential.usernameOrEmail=Username/Email +login.builtin.credential.password=Password +login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account? +login.signup.blurb=Sign up for a Dataverse account. +login.echo.credential.name=Name +login.echo.credential.email=Email +login.echo.credential.affiliation=Affiliation +# how do we exercise login.error? Via a password upgrade failure? See https://github.com/IQSS/dataverse/pull/2922 +login.error=Error validating the username, email address, or password. Please try again. If the problem persists, contact an administrator. +user.error.cannotChangePassword=Sorry, your password cannot be changed. Please contact your system administrator. +user.error.wrongPassword=Sorry, wrong password. +login.button=Log In with {0} +login.button.orcid=Create or Connect your ORCID +# authentication providers +auth.providers.title=Other options +auth.providers.tip=You can convert a Dataverse account to use one of the options above. More information about account creation. +auth.providers.title.builtin=Username/Email +auth.providers.title.shib=Your Institution +auth.providers.title.orcid=ORCID +auth.providers.title.google=Google +auth.providers.title.github=GitHub +auth.providers.blurb=Log in or sign up with your {0} account — more information about account creation. Having trouble? Please contact {3} for assistance. +auth.providers.persistentUserIdName.orcid=ORCID iD +auth.providers.persistentUserIdName.github=ID +auth.providers.persistentUserIdTooltip.orcid=ORCID provides a persistent digital identifier that distinguishes you from other researchers. +auth.providers.persistentUserIdTooltip.github=GitHub assigns a unique number to every user. +auth.providers.insufficientScope=Dataverse was not granted the permission to read user data from {0}. +auth.providers.exception.userinfo=Error getting the user info record from {0}. +auth.providers.token.failRetrieveToken=Dataverse could not retrieve an access token. +auth.providers.token.failParseToken=Dataverse could not parse the access token. +auth.providers.token.failGetUser=Dataverse could not get your user record. Please consult your administrator. +auth.providers.orcid.helpmessage1=ORCID is an open, non-profit, community-based effort to provide a registry of unique researcher identifiers and a transparent method of linking research activities and outputs to these identifiers. ORCID is unique in its ability to reach across disciplines, research sectors, and national boundaries and its cooperation with other identifier systems. Find out more at orcid.org/about. +auth.providers.orcid.helpmessage2=This repository uses your ORCID for authentication (so you don't need another username/password combination). Having your ORCID associated with your datasets also makes it easier for people to find the datasets you have published. + +# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(provider is unknown) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth + +#confirmemail.xhtml +confirmEmail.pageTitle=Email Verification +confirmEmail.submitRequest=Verify Email +confirmEmail.submitRequest.success=A verification email has been sent to {0}. Note, the verify link will expire after {1}. 
+confirmEmail.details.success=Email address verified!
+confirmEmail.details.failure=We were unable to verify your email address. Please navigate to your Account Information page and click the "Verify Email" button.
+confirmEmail.details.goToAccountPageButton=Go to Account Information
+confirmEmail.notVerified=Not Verified
+confirmEmail.verified=Verified
+
+#shib.xhtml
+shib.btn.convertAccount=Convert Account
+shib.btn.createAccount=Create Account
+shib.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in?
+# Bundle file editors, please note that "shib.welcomeExistingUserMessage" is used in a unit test
+shib.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. After converting, you will only need to use your institutional log in.
+# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test
+shib.welcomeExistingUserMessageDefaultInstitution=your institution
+shib.dataverseUsername=Dataverse Username
+shib.currentDataversePassword=Current Dataverse Password
+shib.accountInformation=Account Information
+shib.offerToCreateNewAccount=This information is provided by your institution and will be used to create your Dataverse account.
+shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account.
+
+# oauth2/firstLogin.xhtml
+oauth2.btn.convertAccount=Convert Existing Account
+oauth2.btn.createAccount=Create New Account
+oauth2.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in?
+oauth2.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. After converting, you will only need to use your institutional log in.
+oauth2.welcomeExistingUserMessageDefaultInstitution=your institution
+oauth2.dataverseUsername=Dataverse Username
+oauth2.currentDataversePassword=Current Dataverse Password
+oauth2.chooseUsername=Username:
+oauth2.passwordRejected=Validation Error - Wrong username or password.
+# oauth2.newAccount.title=Account Creation
+oauth2.newAccount.welcomeWithName=Welcome to Dataverse, {0}
+oauth2.newAccount.welcomeNoName=Welcome to Dataverse
+# oauth2.newAccount.email=Email
+# oauth2.newAccount.email.tip=Dataverse uses this email to notify you of issues regarding your data.
+oauth2.newAccount.suggestedEmails=Suggested Email Addresses:
+oauth2.newAccount.username=Username
+oauth2.newAccount.username.tip=This username will be your unique identifier as a Dataverse user.
+oauth2.newAccount.explanation=This information is provided by {0} and will be used to create your {1} account. To log in again, you will have to use the {0} log in option.
+oauth2.newAccount.suggestConvertInsteadOfCreate=If you already have a {0} account, you will need to convert your account.
+# oauth2.newAccount.tabs.convertAccount=Convert Existing Account
+oauth2.newAccount.buttons.convertNewAccount=Convert Account
+oauth2.newAccount.emailTaken=Email already taken. Consider merging the corresponding account instead.
+oauth2.newAccount.emailOk=Email OK.
+oauth2.newAccount.emailInvalid=Invalid email address.
+# oauth2.newAccount.usernameTaken=Username already taken.
+# oauth2.newAccount.usernameOk=Username OK.
+
+# oauth2/convert.xhtml
+# oauth2.convertAccount.title=Account Conversion
+oauth2.convertAccount.explanation=Please enter your {0} account username or email and password to convert your account to the {1} log in option. Learn more about converting your account.
+oauth2.convertAccount.username=Existing username
+oauth2.convertAccount.password=Password
+oauth2.convertAccount.authenticationFailed=Your account can only be converted if you provide the correct username and password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account.
+oauth2.convertAccount.buttonTitle=Convert Account
+oauth2.convertAccount.success=Your Dataverse account is now associated with your {0} account.
+oauth2.convertAccount.failedDeactivated=Your existing account cannot be converted because it has been deactivated.
+
+# oauth2/callback.xhtml
+oauth2.callback.page.title=OAuth Callback
+oauth2.callback.message=Authentication Error - Dataverse could not authenticate your login with the provider that you selected. Please make sure you authorize your account to connect with Dataverse. For more details about the information being requested, see the User Guide.
+
+# deactivated user accounts
+deactivated.error=Sorry, your account has been deactivated.
+
+# tab on dataverseuser.xhtml
+apitoken.title=API Token
+apitoken.message=Your API Token is valid for a year. Check out our {0}API Guide{1} for more information on using your API Token with the Dataverse APIs.
+apitoken.notFound=API Token for {0} has not been created.
+apitoken.expired.warning=This token is about to expire. Please generate a new one.
+apitoken.expired.error=This token has expired. Please generate a new one.
+apitoken.generateBtn=Create Token
+apitoken.regenerateBtn=Recreate Token
+apitoken.revokeBtn=Revoke Token
+apitoken.expirationDate.label=Expiration Date
+
+#dashboard.xhtml
+dashboard.title=Dashboard
+dashboard.card.harvestingclients.header=Harvesting Clients
+dashboard.card.harvestingclients.btn.manage=Manage Clients
+dashboard.card.harvestingclients.clients={0, choice, 0#Clients|1#Client|2#Clients}
+dashboard.card.harvestingclients.datasets={0, choice, 0#Datasets|1#Dataset|2#Datasets}
+dashboard.card.harvestingserver.header=Harvesting Server
+dashboard.card.harvestingserver.enabled=OAI server enabled
+dashboard.card.harvestingserver.disabled=OAI server disabled
+dashboard.card.harvestingserver.status=Status
+dashboard.card.harvestingserver.sets={0, choice, 0#Sets|1#Set|2#Sets}
+dashboard.card.harvestingserver.btn.manage=Manage Server
+dashboard.card.metadataexport.header=Metadata Export
+dashboard.card.metadataexport.message=Dataset metadata export is only available through the {0} API. Learn more in the {0} {1}API Guide{2}.
+
+#harvestclients.xhtml
+harvestclients.title=Manage Harvesting Clients
+harvestclients.toptip=Harvesting can be scheduled to run at a specific time or on demand. Harvesting can be initiated here or via the REST API.
+harvestclients.noClients.label=No clients are configured.
+harvestclients.noClients.why.header=What is Harvesting?
+harvestclients.noClients.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting client, your Dataverse gathers metadata records from remote sources. These can be other Dataverse instances, or other archives that support OAI-PMH, the standard harvesting protocol.
+harvestclients.noClients.why.reason2=Harvested metadata records are searchable by users. Clicking on a harvested dataset in the search results takes the user to the original repository. Harvested datasets cannot be edited in your Dataverse installation.
+harvestclients.noClients.how.header=How To Use Harvesting
+harvestclients.noClients.how.tip1=To harvest metadata, a Harvesting Client is created and configured for each remote repository. Note that when creating a client you will need to select an existing local dataverse to host harvested datasets.
+harvestclients.noClients.how.tip2=Harvested records can be kept in sync with the original repository through scheduled incremental updates, for example, daily or weekly. Alternatively, harvests can be run on demand, from this page or via the REST API.
+harvestclients.noClients.getStarted=To get started, click on the Add Client button above. To learn more about Harvesting, visit the Harvesting section of the User Guide.
+harvestclients.btn.add=Add Client
+harvestclients.tab.header.name=Nickname
+harvestclients.tab.header.url=URL
+harvestclients.tab.header.lastrun=Last Run
+harvestclients.tab.header.lastresults=Last Results
+harvestclients.tab.header.action=Actions
+harvestclients.tab.header.action.btn.run=Run Harvesting
+harvestclients.tab.header.action.btn.edit=Edit
+harvestclients.tab.header.action.btn.delete=Delete
+harvestclients.tab.header.action.btn.delete.dialog.header=Delete Harvesting Client
+harvestclients.tab.header.action.btn.delete.dialog.warning=Are you sure you want to delete the harvesting client "{0}"? Deleting the client will delete all datasets harvested from this remote server.
+harvestclients.tab.header.action.btn.delete.dialog.tip=Note, this action may take a while to process, depending on the number of harvested datasets.
+harvestclients.tab.header.action.delete.infomessage=Harvesting client is being deleted. Note that this may take a while, depending on the amount of harvested content.
+harvestclients.actions.runharvest.success=Successfully started an asynchronous harvest for client "{0}". Please reload the page to check on the harvest results.
+harvestclients.newClientDialog.step1=Step 1 of 4 - Client Information
+harvestclients.newClientDialog.title.new=Create Harvesting Client
+harvestclients.newClientDialog.help=Configure a client to harvest content from a remote server.
+harvestclients.newClientDialog.nickname=Nickname
+harvestclients.newClientDialog.nickname.helptext=Consists of letters, digits, underscores (_) and dashes (-).
+harvestclients.newClientDialog.nickname.required=Client nickname cannot be empty!
+harvestclients.newClientDialog.nickname.invalid=Client nickname can contain only letters, digits, underscores (_) and dashes (-); and must be at most 30 characters.
+harvestclients.newClientDialog.nickname.alreadyused=This nickname is already used.
+harvestclients.newClientDialog.type=Server Protocol
+harvestclients.newClientDialog.type.helptext=Only the OAI server protocol is currently supported.
+harvestclients.newClientDialog.type.OAI=OAI
+harvestclients.newClientDialog.type.Nesstar=Nesstar
+harvestclients.newClientDialog.url=Server URL
+harvestclients.newClientDialog.url.tip=URL of a harvesting resource.
+harvestclients.newClientDialog.url.watermark=Remote harvesting server, http://...
+harvestclients.newClientDialog.url.helptext.notvalidated=URL of a harvesting resource. Once you click 'Next', we will try to establish a connection to the server in order to verify that it is working, and to obtain extra information about its capabilities. +harvestclients.newClientDialog.url.required=A valid harvesting server address is required. +harvestclients.newClientDialog.url.invalid=Invalid URL. Failed to establish connection and receive a valid server response. +harvestclients.newClientDialog.url.noresponse=Failed to establish connection to the server. +harvestclients.newClientDialog.url.badresponse=Invalid response from the server. +harvestclients.newClientDialog.dataverse=Local Dataverse +harvestclients.newClientDialog.dataverse.tip=Dataverse that will host the datasets harvested from this remote resource. +harvestclients.newClientDialog.dataverse.menu.enterName=Enter Dataverse Alias +harvestclients.newClientDialog.dataverse.menu.header=Dataverse Name (Affiliate), Alias +harvestclients.newClientDialog.dataverse.menu.invalidMsg=No matches found +harvestclients.newClientDialog.dataverse.required=You must select an existing dataverse for this harvesting client. +harvestclients.newClientDialog.step2=Step 2 of 4 - Format +harvestclients.newClientDialog.oaiSets=OAI Set +harvestclients.newClientDialog.oaiSets.tip=Harvesting sets offered by this OAI server. +harvestclients.newClientDialog.oaiSets.noset=None +harvestclients.newClientDialog.oaiSets.helptext=Selecting "none" will harvest the default set, as defined by the server. Often this will be the entire body of content across all sub-sets. +harvestclients.newClientDialog.oaiSets.helptext.noset=This OAI server does not support named sets. The entire body of content offered by the server will be harvested. +harvestclients.newClientDialog.oaiSets.listTruncated=Please note that the remote server was taking too long to return the full list of available OAI sets, so the list was truncated. Please select a set from the current list (or select the "no set" option), and try again later, if you need to change it. +harvestclients.newClientDialog.oaiMetadataFormat=Metadata Format +harvestclients.newClientDialog.oaiMetadataFormat.tip=Metadata formats offered by the remote server. +harvestclients.newClientDialog.oaiMetadataFormat.required=Please select the metadata format to harvest from this archive. +harvestclients.newClientDialog.step3=Step 3 of 4 - Schedule +harvestclients.newClientDialog.schedule=Schedule +harvestclients.newClientDialog.schedule.tip=Schedule harvesting to run automatically daily or weekly. +harvestclients.newClientDialog.schedule.time.none.helptext=Leave harvesting unscheduled to run on demand only. +harvestclients.newClientDialog.schedule.none=None +harvestclients.newClientDialog.schedule.daily=Daily +harvestclients.newClientDialog.schedule.weekly=Weekly +harvestclients.newClientDialog.schedule.time=Time +harvestclients.newClientDialog.schedule.day=Day +harvestclients.newClientDialog.schedule.time.am=AM +harvestclients.newClientDialog.schedule.time.pm=PM +harvestclients.newClientDialog.schedule.time.helptext=Scheduled times are in your local time. +harvestclients.newClientDialog.btn.create=Create Client +harvestclients.newClientDialog.success=Successfully created harvesting client "{0}". +harvestclients.newClientDialog.step4=Step 4 of 4 - Display +harvestclients.newClientDialog.harvestingStyle=Archive Type +harvestclients.newClientDialog.harvestingStyle.tip=Type of remote archive. 
+harvestclients.newClientDialog.harvestingStyle.helptext=Select the archive type that best describes this remote server in order to properly apply formatting rules and styles to the harvested metadata as they are shown in the search results. Note that improperly selecting the type of the remote archive can result in incomplete entries in the search results, and a failure to redirect the user to the archival source of the data.
+harvestclients.newClientDialog.harvestingStyle.required=Please select one of the values from the menu.
+harvestclients.viewEditDialog.title=Edit Harvesting Client
+harvestclients.viewEditDialog.archiveUrl=Archive URL
+harvestclients.viewEditDialog.archiveUrl.tip=The URL of the archive that serves the data harvested by this client, which is used in search results for links to the original sources of the harvested content.
+harvestclients.viewEditDialog.archiveUrl.helptext=Edit if this URL differs from the Server URL.
+harvestclients.viewEditDialog.archiveDescription=Archive Description
+harvestclients.viewEditDialog.archiveDescription.tip=Description of the archival source of the harvested content, displayed in search results.
+harvestclients.viewEditDialog.archiveDescription.default.generic=This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data.
+harvestclients.viewEditDialog.btn.save=Save Changes
+harvestclients.newClientDialog.title.edit=Edit Group {0}
+
+#harvestset.xhtml
+harvestserver.title=Manage Harvesting Server
+harvestserver.toptip=Define sets of local datasets that will be available for harvesting by remote clients.
+harvestserver.service.label=OAI Server
+harvestserver.service.enabled=Enabled
+harvestserver.service.disabled=Disabled
+harvestserver.service.disabled.msg=Harvesting Server is currently disabled.
+harvestserver.service.empty=No sets are configured.
+harvestserver.service.enable.success=OAI Service has been successfully enabled.
+harvestserver.noSets.why.header=What is a Harvesting Server?
+harvestserver.noSets.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting server, your Dataverse can make some of the local dataset metadata available to remote harvesting clients. These can be other Dataverse instances, or any other clients that support the OAI-PMH harvesting protocol.
+harvestserver.noSets.why.reason2=Only the published, unrestricted datasets in your Dataverse can be harvested. Remote clients normally keep their records in sync through scheduled incremental updates, daily or weekly, thus minimizing the load on your server. Note that it is only the metadata that are harvested. Remote harvesters will generally not attempt to download the data files themselves.
+harvestserver.noSets.how.header=How to run a Harvesting Server?
+harvestserver.noSets.how.tip1=Harvesting server can be enabled or disabled on this page.
+harvestserver.noSets.how.tip2=Once the service is enabled, you can define collections of local datasets that will be available to remote harvesters as OAI Sets. Sets are defined by search queries (for example, authorName:king; or parentId:1234 - to select all the datasets that belong to the dataverse specified; or dsPersistentId:"doi:1234/" to select all the datasets with the persistent identifier authority specified). Consult the Search API section of the Dataverse User Guide for more information on the search queries.
+harvestserver.noSets.getStarted=To get started, enable the OAI server and click on the Add Set button. To learn more about Harvesting, visit the Harvesting section of the User Guide.
+harvestserver.btn.add=Add Set
+harvestserver.tab.header.spec=OAI setSpec
+harvestserver.tab.col.spec.default=DEFAULT
+harvestserver.tab.header.description=Description
+harvestserver.tab.header.definition=Definition Query
+harvestserver.tab.col.definition.default=All Published Local Datasets
+harvestserver.tab.header.stats=Datasets
+harvestserver.tab.col.stats.empty=No records (empty set)
+harvestserver.tab.col.stats.results={0} {0, choice, 0#datasets|1#dataset|2#datasets} ({1} {1, choice, 0#records|1#record|2#records} exported, {2} marked as deleted)
+harvestserver.tab.header.action=Actions
+harvestserver.tab.header.action.btn.export=Run Export
+harvestserver.actions.runreexport.success=Successfully started an asynchronous re-export job for OAI set "{0}" (please reload the page to check on the export progress).
+harvestserver.tab.header.action.btn.edit=Edit
+harvestserver.tab.header.action.btn.delete=Delete
+harvestserver.tab.header.action.btn.delete.dialog.header=Delete Harvesting Set
+harvestserver.tab.header.action.btn.delete.dialog.tip=Are you sure you want to delete the OAI set "{0}"? You cannot undo a delete!
+harvestserver.tab.header.action.delete.infomessage=Selected harvesting set is being deleted. (this may take a few moments)
+harvestserver.newSetDialog.title.new=Create Harvesting Set
+harvestserver.newSetDialog.help=Define a set of local datasets available for harvesting to remote clients.
+harvestserver.newSetDialog.setspec=Name/OAI setSpec
+harvestserver.newSetDialog.setspec.tip=A unique name (OAI setSpec) identifying this set.
+harvestserver.newSetDialog.setspec.helptext=Consists of letters, digits, underscores (_) and dashes (-).
+harvestserver.editSetDialog.setspec.helptext=The name cannot be changed once the set has been created.
+harvestserver.editSetDialog.setspec.helptext.default=this is the default, unnamed set
+harvestserver.newSetDialog.setspec.required=Name (OAI setSpec) cannot be empty!
+harvestserver.newSetDialog.setspec.invalid=Name (OAI setSpec) can contain only letters, digits, underscores (_) and dashes (-).
+harvestserver.newSetDialog.setspec.alreadyused=This set name (OAI setSpec) is already used.
+harvestserver.newSetDialog.setspec.sizelimit=This set name (OAI setSpec) may be no longer than 30 characters.
+harvestserver.newSetDialog.setspec.superUser.required=Only superusers may create OAI sets.
+harvestserver.newSetDialog.setdescription=Description
+harvestserver.newSetDialog.setdescription.tip=Provide a brief description for this OAI set.
+harvestserver.newSetDialog.setdescription.required=Set description cannot be empty!
+harvestserver.newSetDialog.setdescription.default=The default, "no name" set. The OAI server will serve the records from this set when no "setspec" argument is specified by the client.
+harvestserver.newSetDialog.setquery=Definition Query
+harvestserver.newSetDialog.setquery.tip=Search query that defines the content of the dataset.
+harvestserver.newSetDialog.setquery.helptext=Example query: authorName:king
+harvestserver.newSetDialog.setquery.required=Search query cannot be left empty!
+harvestserver.newSetDialog.setquery.results=Search query returned {0} datasets!
+harvestserver.newSetDialog.setquery.empty=WARNING: Search query returned no results!
+harvestserver.newSetDialog.btn.create=Create Set
+harvestserver.newSetDialog.success=Successfully created harvesting set "{0}".
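+# Note: values such as harvestserver.tab.col.stats.results above pluralize
+# through MessageFormat's embedded ChoiceFormat: exactly 1 picks the singular
+# branch, and the 2# limit means "2 or greater". A sketch:
+#
+#   String p = "{0} {0, choice, 0#datasets|1#dataset|2#datasets}";
+#   java.text.MessageFormat.format(p, 1);   // -> "1 dataset"
+#   java.text.MessageFormat.format(p, 25);  // -> "25 datasets"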
+harvestserver.viewEditDialog.title=Edit Harvesting Set +harvestserver.viewEditDialog.btn.save=Save Changes + +#dashboard-users.xhtml +dashboard.card.users=Users +dashboard.card.users.header=Dashboard - User List +dashboard.card.users.super=Superusers +dashboard.card.users.manage=Manage Users +dashboard.card.users.message=List and manage users. +dashboard.list_users.searchTerm.watermark=Search these users... +dashboard.list_users.tbl_header.userId=ID +dashboard.list_users.tbl_header.userIdAZ=ID (A-Z) +dashboard.list_users.tbl_header.userIdZA=ID (Z-A) +dashboard.list_users.tbl_header.userIdentifier=Username +dashboard.list_users.tbl_header.userIdentifierAZ=Username (A-Z) +dashboard.list_users.tbl_header.userIdentifierZA=Username (Z-A) +dashboard.list_users.tbl_header.name=Name +dashboard.list_users.tbl_header.lastName=Last Name +dashboard.list_users.tbl_header.lastNameAZ=Last Name (A-Z) +dashboard.list_users.tbl_header.lastNameZA=Last Name (Z-A) +dashboard.list_users.tbl_header.firstName=First Name +dashboard.list_users.tbl_header.email=Email +dashboard.list_users.tbl_header.emailAZ=Email (A-Z) +dashboard.list_users.tbl_header.emailZA=Email (Z-A) +dashboard.list_users.tbl_header.affiliation=Affiliation +dashboard.list_users.tbl_header.affiliationAZ=Affiliation (A-Z) +dashboard.list_users.tbl_header.affiliationZA=Affiliation (Z-A) +dashboard.list_users.tbl_header.roles=Roles +dashboard.list_users.tbl_header.position=Position +dashboard.list_users.tbl_header.isSuperuser=Superuser +dashboard.list_users.tbl_header.superuserAZ=Superuser (A-Z) +dashboard.list_users.tbl_header.superuserZA=Superuser (Z-A) +dashboard.list_users.tbl_header.authProviderFactoryAlias=Authentication +dashboard.list_users.tbl_header.authProviderFactoryAliasAZ=Authentication (A-Z) +dashboard.list_users.tbl_header.authProviderFactoryAliasZA=Authentication (Z-A) +dashboard.list_users.tbl_header.createdTime=Created Time +dashboard.list_users.tbl_header.lastLoginTime=Last Login Time +dashboard.list_users.tbl_header.lastApiUseTime=Last API Use Time +dashboard.list_users.tbl_header.deactivated=deactivated +dashboard.list_users.tbl_header.roles.removeAll=Remove All +dashboard.list_users.tbl_header.roles.removeAll.header=Remove All Roles +dashboard.list_users.tbl_header.roles.removeAll.confirmationText=Are you sure you want to remove all roles for user {0}? +dashboard.list_users.removeAll.message.success=All roles have been removed for user {0}. +dashboard.list_users.removeAll.message.failure=Failed to remove roles for user {0}. +dashboard.list_users.toggleSuperuser=Edit Superuser Status +dashboard.list_users.toggleSuperuser.confirmationText.add=Are you sure you want to enable superuser status for user {0}? +dashboard.list_users.toggleSuperuser.confirmationText.remove=Are you sure you want to disable superuser status for user {0}? +dashboard.list_users.api.auth.invalid_apikey=The API key is invalid. +dashboard.list_users.api.auth.not_superuser=Forbidden. You must be a superuser. + +#dashboard-datamove.xhtml +dashboard.card.datamove=Data +dashboard.card.datamove.header=Dashboard - Move Data +dashboard.card.datamove.manage=Move Data +dashboard.card.datamove.message=Manage and curate your installation by moving datasets from one host dataverse to another. See also Managing Datasets and Dataverses in the Admin Guide. 
+dashboard.card.datamove.selectdataset.header=Dataset to Move
+dashboard.card.datamove.newdataverse.header=New Host Dataverse
+dashboard.card.datamove.dataset.label=Dataset
+dashboard.card.datamove.dataverse.label=Dataverse
+dashboard.card.datamove.confirm.dialog=Are you sure you want to move this dataset?
+dashboard.card.datamove.confirm.yes=Yes, Move Data
+dashboard.card.datamove.message.success=The dataset "{0}" ({1}) has been successfully moved to {2}.
+dashboard.card.datamove.message.failure.summary=Failed to move dataset
+dashboard.card.datamove.message.failure.details=The dataset "{0}" ({1}) could not be moved to {2}. {3}{4}
+dashboard.card.datamove.dataverse.placeholder=Enter Dataverse Identifier...
+dashboard.card.datamove.dataverse.menu.header=Dataverse Name (Affiliate), Identifier
+dashboard.card.datamove.dataverse.menu.invalidMsg=No matches found
+dashboard.card.datamove.dataset.placeholder=Enter Dataset Persistent ID, doi:...
+dashboard.card.datamove.dataset.menu.header=Dataset Persistent ID, Title, Host Dataverse Identifier
+dashboard.card.datamove.dataset.menu.invalidMsg=No matches found
+dashboard.card.datamove.dataset.command.error.targetDataverseUnpublishedDatasetPublished=A published dataset may not be moved to an unpublished dataverse. You can retry the move after publishing {0}.
+dashboard.card.datamove.dataset.command.error.targetDataverseSameAsOriginalDataverse=This dataset is already in this dataverse.
+dashboard.card.datamove.dataset.command.error.unforced.datasetGuestbookNotInTargetDataverse=The guestbook would be removed from this dataset if you moved it because the guestbook is not in the new host dataverse.
+dashboard.card.datamove.dataset.command.error.unforced.linkedToTargetDataverseOrOneOfItsParents=This dataset is linked to the new host dataverse or one of its parents. This move would remove the link to this dataset.
+dashboard.card.datamove.dataset.command.error.unforced.suggestForce=Forcing this move is currently only available via API. Please see "Move a Dataset" under Managing Datasets and Dataverses in the Admin Guide for details.
+dashboard.card.datamove.dataset.command.error.indexingProblem=Dataset could not be moved. Indexing failed.
+
+#MailServiceBean.java
+notification.email.create.dataverse.subject={0}: Your dataverse has been created
+notification.email.create.dataset.subject={0}: Your dataset has been created
+notification.email.request.file.access.subject={0}: Access has been requested for a restricted file
+notification.email.grant.file.access.subject={0}: You have been granted access to a restricted file
+notification.email.rejected.file.access.subject={0}: Your request for access to a restricted file has been rejected
+notification.email.submit.dataset.subject={0}: Your dataset has been submitted for review
+notification.email.publish.dataset.subject={0}: Your dataset has been published
+notification.email.publishFailure.dataset.subject={0}: Failed to publish your dataset
+notification.email.returned.dataset.subject={0}: Your dataset has been returned
+notification.email.workflow.success.subject={0}: Your dataset has been processed
+notification.email.workflow.success=A workflow running on {0} (view at {1}) succeeded: {2}
+notification.email.workflow.failure.subject={0}: Failed to process your dataset
+notification.email.workflow.failure=A workflow running on {0} (view at {1}) failed: {2}
+notification.email.workflow.nullMessage=No additional message sent from the workflow.
+notification.email.create.account.subject={0}: Your account has been created
+notification.email.assign.role.subject={0}: You have been assigned a role
+notification.email.revoke.role.subject={0}: Your role has been revoked
+notification.email.verifyEmail.subject={0}: Verify your email address
+notification.email.ingestCompleted.subject={0}: Your ingest has successfully finished!
+notification.email.ingestCompletedWithErrors.subject={0}: Your ingest has finished with errors!
+notification.email.greeting=Hello, \n
+notification.email.greeting.html=Hello,<br/>
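+# Note: notification.email.greeting ends in the two-character escape "\n",
+# which java.util.Properties unescapes to a real newline when the bundle is
+# loaded; the .html variant carries an explicit <br/> tag instead, and the
+# notification.email.closing strings below follow the same pairing. A sketch
+# of the unescaping (a standalone example, not Dataverse code):
+#
+#   java.util.Properties p = new java.util.Properties();
+#   p.load(new java.io.StringReader("greeting=Hello, \\n"));
+#   p.getProperty("greeting");   // -> "Hello, " followed by a newline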
+# Bundle file editors, please note that "notification.email.welcome" is used in a unit test
+notification.email.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the User Guide at {1}/{2}/user or contact {3} at {4} for assistance.
+notification.email.welcomeConfirmEmailAddOn=\n\nPlease verify your email address at {0} . Note, the verify link will expire after {1}. Send another verification email by visiting your account page.
+notification.email.requestFileAccess=File access requested for dataset: {0} by {1} ({2}). Manage permissions at {3}.
+notification.email.grantFileAccess=Access granted for files in dataset: {0} (view at {1}).
+notification.email.rejectFileAccess=Your request for access was rejected for the requested files in the dataset: {0} (view at {1}). If you have any questions about why your request was rejected, you may reach the dataset owner using the "Contact" link on the upper right corner of the dataset page.
+# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test
+notification.email.createDataverse=Your new dataverse named {0} (view at {1} ) was created in {2} (view at {3} ). To learn more about what you can do with your dataverse, check out the Dataverse Management - User Guide at {4}/{5}/user/dataverse-management.html .
+# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test
+notification.email.createDataset=Your new dataset named {0} (view at {1} ) was created in {2} (view at {3} ). To learn more about what you can do with a dataset, check out the Dataset Management - User Guide at {4}/{5}/user/dataset-management.html .
+notification.email.wasSubmittedForReview={0} (view at {1}) was submitted for review to be published in {2} (view at {3}). Don''t forget to publish it or send it back to the contributor, {4} ({5})\!
+notification.email.wasReturnedByReviewer={0} (view at {1}) was returned by the curator of {2} (view at {3}).
+notification.email.wasPublished={0} (view at {1}) was published in {2} (view at {3}).
+notification.email.publishFailedPidReg={0} (view at {1}) in {2} (view at {3}) could not be published due to a failure to register or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen.
+#notification.email.closing=\n\nYou may contact us for support at {0}.\n\nThank you,\n{1}
+#UB
+notification.email.closing=\n\nYou may contact us for support at https://site.uit.no/dataverseno/support/ .\n\nThank you,\n{1}
+
+notification.email.closing.html=<br/><br/>You may contact us for support at {0}.<br/><br/>Thank you,<br/>{1}
+notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}).
+notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}).
+notification.email.changeEmail=Hello, {0}.{1}\n\nPlease contact us if you did not intend this change or if you need assistance.
+notification.email.passwordReset=Hi {0},\n\nSomeone, hopefully you, requested a password reset for {1}.\n\nPlease click the link below to reset your Dataverse account password:\n\n {2} \n\n The link above will only work for the next {3} minutes.\n\n Please contact us if you did not request this password reset or need further help.
+notification.email.passwordReset.subject=Dataverse Password Reset Requested
+hours=hours
+hour=hour
+minutes=minutes
+minute=minute
+notification.email.checksumfail.subject={0}: Your upload failed checksum validation
+notification.email.import.filesystem.subject=Dataset {0} has been successfully uploaded and verified
+notification.email.import.checksum.subject={0}: Your file checksum job has completed
+contact.delegation={0} on behalf of {1}
+contact.delegation.default_personal=Dataverse Installation Admin
+notification.email.info.unavailable=Unavailable
+notification.email.apiTokenGenerated=Hello {0} {1},\n\nAPI Token has been generated. Please keep it secure as you would a password.
+notification.email.apiTokenGenerated.subject=API Token was generated
+
+# dataverse.xhtml
+dataverse.name=Dataverse Name
+dataverse.name.title=The project, department, university, professor, or journal this dataverse will contain data for.
+dataverse.enterName=Enter name...
+dataverse.host.title=The dataverse which contains this data.
+dataverse.host.tip=Changing the host dataverse will clear any fields you may have entered data into.
+dataverse.host.autocomplete.nomatches=No matches
+dataverse.identifier.title=Short name used for the URL of this dataverse.
+dataverse.affiliation.title=The organization with which this dataverse is affiliated.
+dataverse.storage.title=A storage service to be used for datasets in this dataverse.
+dataverse.category=Category
+dataverse.category.title=The type that most closely reflects this dataverse.
+dataverse.type.selectTab.top=Select one...
+dataverse.type.selectTab.researchers=Researcher
+dataverse.type.selectTab.researchProjects=Research Project
+dataverse.type.selectTab.journals=Journal
+dataverse.type.selectTab.organizationsAndInsitutions=Organization or Institution
+dataverse.type.selectTab.teachingCourses=Teaching Course
+dataverse.type.selectTab.uncategorized=Uncategorized
+dataverse.type.selectTab.researchGroup=Research Group
+dataverse.type.selectTab.laboratory=Laboratory
+dataverse.type.selectTab.department=Department
+dataverse.description.title=A summary describing the purpose, nature, or scope of this dataverse.
+dataverse.email=Email
+dataverse.email.title=The e-mail address(es) of the contact(s) for the dataverse.
+dataverse.share.dataverseShare=Share Dataverse
+dataverse.share.dataverseShare.tip=Share this dataverse on your favorite social media networks.
+dataverse.share.dataverseShare.shareText=View this dataverse.
+dataverse.subject.title=Subject(s) covered in this dataverse.
+dataverse.metadataElements=Metadata Fields
+dataverse.metadataElements.tip=Choose the metadata fields to use in dataset templates and when adding a dataset to this dataverse.
+dataverse.metadataElements.from.tip=Use metadata fields from {0} +dataverse.resetModifications=Reset Modifications +dataverse.resetModifications.text=Are you sure you want to reset the selected metadata fields? If you do this, any customizations (hidden, required, optional) you have done will no longer appear. +dataverse.field.required=(Required) +dataverse.field.example1= (Examples: +dataverse.field.example2=) +dataverse.field.set.tip=[+] View fields + set as hidden, required, or optional +dataverse.field.set.view=[+] View fields +dataverse.field.requiredByDataverse=Required by Dataverse +dataverse.facetPickList.text=Browse/Search Facets +dataverse.facetPickList.tip=Choose the metadata fields to use as facets for browsing datasets and dataverses in this dataverse. +dataverse.facetPickList.facetsFromHost.text=Use browse/search facets from {0} +dataverse.facetPickList.metadataBlockList.all=All Metadata Fields +dataverse.edit=Edit +dataverse.option.generalInfo=General Information +dataverse.option.themeAndWidgets=Theme + Widgets +dataverse.option.featuredDataverse=Featured Dataverses +dataverse.option.permissions=Permissions +dataverse.option.dataverseGroups=Groups +dataverse.option.datasetTemplates=Dataset Templates +dataverse.option.datasetGuestbooks=Dataset Guestbooks +dataverse.option.deleteDataverse=Delete Dataverse +dataverse.publish.btn=Publish +dataverse.publish.header=Publish Dataverse +dataverse.nopublished=No Published Dataverses +dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. +dataverse.contact=Email Dataverse Contact +dataverse.link=Link Dataverse +dataverse.link.btn.tip=Link to Your Dataverse +dataverse.link.yourDataverses=Your Dataverse +dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name +dataverse.link.save=Save Linked Dataverse +dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. +dataverse.link.dataset.choose=Enter the name of the dataverse you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. +dataverse.link.dataset.none=No linkable dataverses available. +dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in. +dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. Create a dataverse to get started. +dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses. +dataverse.savedsearch.link=Link Search +dataverse.savedsearch.searchquery=Search +dataverse.savedsearch.filterQueries=Facets +dataverse.savedsearch.save=Save Linked Search +dataverse.savedsearch.dataverse.choose=Choose which of your dataverses you would like to link this search to. +dataverse.savedsearch.no.choice=You have one dataverse to which you may add a saved search. +# Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test +dataverse.saved.search.success=The saved search has been successfully linked to {0}. +dataverse.saved.search.failure=The saved search was not able to be linked. +dataverse.linked.success= {0} has been successfully linked to {1}. +dataverse.linked.success.wait= {0} has been successfully linked to {1}. Please wait for its contents to appear. +dataverse.linked.internalerror={0} has been successfully linked to {1} but contents will not appear until an internal error has been fixed. 
+dataverse.linked.error.alreadyLinked={0} has already been linked to {1}. +dataverse.page.pre=Previous +dataverse.page.next=Next +dataverse.byCategory=Dataverses by Category +dataverse.displayFeatured=Display the dataverses selected below on the landing page of this dataverse. +dataverse.selectToFeature=Select dataverses to feature on the landing page of this dataverse. +dataverse.publish.tip=Are you sure you want to publish your dataverse? Once you do so it must remain published. +dataverse.publish.failed.tip=This dataverse cannot be published because the dataverse it is in has not been published. +dataverse.publish.failed=Cannot publish dataverse. +dataverse.publish.success=Your dataverse is now public. +dataverse.publish.failure=This dataverse was not able to be published. +dataverse.delete.tip=Are you sure you want to delete your dataverse? You cannot undelete this dataverse. +dataverse.delete=Delete Dataverse +dataverse.delete.success=Your dataverse has been deleted. +dataverse.delete.failure=This dataverse was not able to be deleted. +# Bundle file editors, please note that "dataverse.create.success" is used in a unit test because it's so fancy with two parameters +dataverse.create.success=You have successfully created your dataverse! To learn more about what you can do with your dataverse, check out the User Guide. +dataverse.create.failure=This dataverse was not able to be created. +dataverse.create.authenticatedUsersOnly=Only authenticated users can create dataverses. +dataverse.update.success=You have successfully updated your dataverse! +dataverse.update.failure=This dataverse was not able to be updated. +dataverse.selected=Selected +dataverse.listing.error=Fatal error trying to list the contents of the dataverse. Please report this error to the Dataverse administrator. +dataverse.datasize=Total size of the files stored in this dataverse: {0} bytes +dataverse.datasize.ioerror=Fatal IO error while trying to determine the total size of the files stored in the dataverse. Please report this error to the Dataverse administrator. +dataverse.storage.inherited=(inherited from enclosing Dataverse) +dataverse.storage.default=(Default) +# rolesAndPermissionsFragment.xhtml + +# advanced.xhtml +advanced.search.header.dataverses=Dataverses +advanced.search.dataverses.name.tip=The project, department, university, professor, or journal this Dataverse will contain data for. +advanced.search.dataverses.affiliation.tip=The organization with which this Dataverse is affiliated. +advanced.search.dataverses.description.tip=A summary describing the purpose, nature, or scope of this Dataverse. +advanced.search.dataverses.subject.tip=Domain-specific Subject Categories that are topically relevant to this Dataverse. +advanced.search.header.datasets=Datasets +advanced.search.header.files=Files +advanced.search.files.name.tip=The name given to identify the file. +advanced.search.files.description.tip=A summary describing the file and its variables. +advanced.search.files.persistentId.tip=The persistent identifier for the file. +advanced.search.files.persistentId=Data File Persistent ID +advanced.search.files.persistentId.tip=The unique persistent identifier for a data file, which can be a Handle or DOI in Dataverse. +advanced.search.files.fileType=File Type +advanced.search.files.fileType.tip=The file type, e.g. Comma Separated Values, Plain Text, R, etc. +advanced.search.files.variableName=Variable Name +advanced.search.files.variableName.tip=The name of the variable's column in the data frame. 
+advanced.search.files.variableLabel=Variable Label
+advanced.search.files.variableLabel.tip=A short description of the variable.
+advanced.search.datasets.persistentId=Dataset Persistent ID
+advanced.search.datasets.persistentId.tip=The unique persistent identifier for a dataset, which can be a Handle or DOI in Dataverse.
+advanced.search.files.fileTags=File Tags
+advanced.search.files.fileTags.tip=Terms such as "Documentation", "Data", or "Code" that have been applied to files.
+
+# search
+search.datasets.literalquestion=Text of the actual, literal question asked.
+search.datasets.interviewinstructions=Specific instructions to the individual conducting an interview.
+search.datasets.postquestion=Text describing what occurs after the literal question has been asked.
+search.datasets.variableuniverse=The group of persons or other elements that are the object of research and to which any analytic results refer.
+search.datasets.variableNotes=For clarifying information/annotation regarding the variable.
+
+# search-include-fragment.xhtml
+dataverse.search.advancedSearch=Advanced Search
+dataverse.search.input.watermark=Search this dataverse...
+account.search.input.watermark=Search this data...
+dataverse.search.btn.find=Find
+dataverse.results.btn.addData=Add Data
+dataverse.results.btn.addData.newDataverse=New Dataverse
+dataverse.results.btn.addData.newDataset=New Dataset
+dataverse.results.dialog.addDataGuest.header=Add Data
+dataverse.results.dialog.addDataGuest.msg=Log in to create a dataverse or add a dataset.
+#dataverse.results.dialog.addDataGuest.msg.signup=Sign up or log in to create a dataverse or add a dataset.
+#UB
+dataverse.results.dialog.addDataGuest.msg.signup=You need to log in to add a dataset.
+
+dataverse.results.dialog.addDataGuest.signup.title=Sign Up for a Dataverse Account
+dataverse.results.dialog.addDataGuest.login.title=Log into your Dataverse Account
+dataverse.results.types.dataverses=Dataverses
+dataverse.results.types.datasets=Datasets
+dataverse.results.types.files=Files
+dataverse.results.btn.filterResults=Filter Results
+# Bundle file editors, please note that "dataverse.results.empty.zero" is used in a unit test
+dataverse.results.empty.zero=There are no dataverses, datasets, or files that match your search. Please try a new search by using other or broader terms. You can also check out the search guide for tips.
+# Bundle file editors, please note that "dataverse.results.empty.hidden" is used in a unit test
+dataverse.results.empty.hidden=There are no search results based on how you have narrowed your search. You can check out the search guide for tips.
+dataverse.results.empty.browse.guest.zero=This dataverse currently has no dataverses, datasets, or files. Please log in to see if you are able to add to it.
+dataverse.results.empty.browse.guest.hidden=There are no dataverses within this dataverse. Please log in to see if you are able to add to it.
+dataverse.results.empty.browse.loggedin.noperms.zero=This dataverse currently has no dataverses, datasets, or files. You can use the Email Dataverse Contact button above to ask about this dataverse or request access for this dataverse.
+dataverse.results.empty.browse.loggedin.noperms.hidden=There are no dataverses within this dataverse.
+dataverse.results.empty.browse.loggedin.perms.zero=This dataverse currently has no dataverses, datasets, or files. You can add to it by using the Add Data button on this page.
+account.results.empty.browse.loggedin.perms.zero=You have no dataverses, datasets, or files associated with your account. You can add a dataverse or dataset by clicking the Add Data button above. Read more about adding data in the User Guide. +dataverse.results.empty.browse.loggedin.perms.hidden=There are no dataverses within this dataverse. You can add to it by using the Add Data button on this page. +dataverse.results.empty.link.technicalDetails=More technical details +dataverse.search.facet.error=There was an error with your search parameters. Please clear your search and try again. +dataverse.results.count.toofresults={0} to {1} of {2} {2, choice, 0#Results|1#Result|2#Results} +dataverse.results.paginator.current=(Current) +dataverse.results.btn.sort=Sort +dataverse.results.btn.sort.option.nameAZ=Name (A-Z) +dataverse.results.btn.sort.option.nameZA=Name (Z-A) +dataverse.results.btn.sort.option.newest=Newest +dataverse.results.btn.sort.option.oldest=Oldest +dataverse.results.btn.sort.option.relevance=Relevance +dataverse.results.cards.foundInMetadata=Found in Metadata Fields: +dataverse.results.cards.files.tabularData=Tabular Data +dataverse.results.solrIsDown=Please note: Due to an internal error, browsing and searching is not available. +dataverse.theme.title=Theme +dataverse.theme.inheritCustomization.title=For this dataverse, use the same theme as the parent dataverse. +dataverse.theme.inheritCustomization.label=Inherit Theme +dataverse.theme.inheritCustomization.checkbox=Inherit theme from {0} +dataverse.theme.logo=Logo +dataverse.theme.logo.tip=Supported image types are JPG, TIF, or PNG and should be no larger than 500 KB. The maximum display size for an image file in a dataverse's theme is 940 pixels wide by 120 pixels high. +dataverse.theme.logo.format=Logo Format +dataverse.theme.logo.format.selectTab.square=Square +dataverse.theme.logo.format.selectTab.rectangle=Rectangle +dataverse.theme.logo.alignment=Logo Alignment +dataverse.theme.logo.alignment.selectTab.left=Left +dataverse.theme.logo.alignment.selectTab.center=Center +dataverse.theme.logo.alignment.selectTab.right=Right +dataverse.theme.logo.backColor=Logo Background Color +dataverse.theme.logo.image.upload=Upload Image +dataverse.theme.tagline=Tagline +dataverse.theme.website=Website +dataverse.theme.linkColor=Link Color +dataverse.theme.txtColor=Text Color +dataverse.theme.backColor=Background Color +dataverse.theme.success=You have successfully updated the theme for this dataverse! +dataverse.theme.failure=The dataverse theme has not been updated. +dataverse.theme.logo.image=Logo Image +dataverse.theme.logo.imageFooter=Footer Image +dataverse.theme.logo.image.title=The logo or image file you wish to display in the header of this dataverse. +dataverse.theme.logo.image.footer=The logo or image file you wish to display in the footer of this dataverse. +dataverse.theme.logo.image.uploadNewFile=Upload New File +dataverse.theme.logo.image.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataverse.theme.logo.image.uploadImgFile=Upload Image File +dataverse.theme.logo.format.title=The shape for the logo or image file you upload for this dataverse. +dataverse.theme.logo.alignment.title=Where the logo or image should display in the header or footer. +dataverse.theme.logo.backColor.title=Select a color to display in the header or footer of this dataverse. 
+dataverse.theme.headerColor=Header Colors
+dataverse.theme.headerColor.tip=Colors you select to style the header of this dataverse.
+dataverse.theme.backColor.title=Color for the header area that contains the image, tagline, URL, and text.
+dataverse.theme.linkColor.title=Color in which links will display.
+dataverse.theme.txtColor.title=Color for the tagline text and the name of this dataverse.
+dataverse.theme.tagline.title=A phrase or sentence that describes this dataverse.
+dataverse.theme.tagline.tip=Provide a tagline that is 140 characters or less.
+dataverse.theme.website.title=URL for your personal website, institution, or any website that relates to this dataverse.
+dataverse.theme.website.tip=The website will be linked behind the tagline. To have a website listed, you must also provide a tagline.
+dataverse.theme.website.watermark=Your personal site, http://...
+dataverse.theme.website.invalidMsg=Invalid URL.
+dataverse.theme.disabled=The theme for the root dataverse has been administratively disabled with the :DisableRootDataverseTheme database setting.
+dataverse.widgets.title=Widgets
+dataverse.widgets.notPublished.why.header=Why Use Widgets?
+dataverse.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website.
+dataverse.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website.
+dataverse.widgets.notPublished.how.header=How To Use Widgets
+dataverse.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published.
+dataverse.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website.
+dataverse.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here.
+dataverse.widgets.notPublished.getStarted=To get started, publish your dataverse. To learn more about Widgets, visit the Theme + Widgets section of the User Guide.
+dataverse.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Theme + Widgets section of the User Guide.
+dataverse.widgets.searchBox.txt=Dataverse Search Box
+dataverse.widgets.searchBox.tip=Add a way for visitors on your website to be able to search Dataverse.
+dataverse.widgets.dataverseListing.txt=Dataverse Listing
+dataverse.widgets.dataverseListing.tip=Add a way for visitors on your website to be able to view your dataverses and datasets, sort, or browse through them.
+dataverse.widgets.advanced.popup.header=Widget Advanced Options
+dataverse.widgets.advanced.prompt=Forward dataset citation persistent URLs to your personal website. The page you submit as your Personal Website URL must contain the code snippet for the Dataverse Listing widget.
+dataverse.widgets.advanced.url.label=Personal Website URL
+dataverse.widgets.advanced.url.watermark=http://www.example.com/page-name
+dataverse.widgets.advanced.invalid.message=Please enter a valid URL
+dataverse.widgets.advanced.success.message=Successfully updated your Personal Website URL
+dataverse.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated.
+
+# permissions-manage.xhtml
+dataverse.permissions.title=Permissions
+dataverse.permissions.dataset.title=Dataset Permissions
+dataverse.permissions.access.accessBtn=Edit Access
+dataverse.permissions.usersOrGroups=Users/Groups
+dataverse.permissions.requests=Requests
+dataverse.permissions.usersOrGroups.assignBtn=Assign Roles to Users/Groups
+dataverse.permissions.usersOrGroups.createGroupBtn=Create Group
+dataverse.permissions.usersOrGroups.description=All the users and groups that have access to your dataverse.
+dataverse.permissions.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation)
+dataverse.permissions.usersOrGroups.tabHeader.id=ID
+dataverse.permissions.usersOrGroups.tabHeader.role=Role
+dataverse.permissions.usersOrGroups.tabHeader.action=Action
+dataverse.permissions.usersOrGroups.assignedAt=Role assigned at {0}
+dataverse.permissions.usersOrGroups.removeBtn=Remove Assigned Role
+dataverse.permissions.usersOrGroups.removeBtn.confirmation=Are you sure you want to remove this role assignment?
+dataverse.permissions.roles=Roles
+dataverse.permissions.roles.add=Add New Role
+dataverse.permissions.roles.description=All the roles set up in your dataverse that you can assign to users and groups.
+dataverse.permissions.roles.edit=Edit Role
+dataverse.permissions.roles.copy=Copy Role
+dataverse.permissions.roles.alias.required=Please enter a unique identifier for this role.
+dataverse.permissions.roles.name.required=Please enter a name for this role.
+
+# permissions-manage-files.xhtml
+dataverse.permissionsFiles.title=Restricted File Permissions
+dataverse.permissionsFiles.usersOrGroups=Users/Groups
+dataverse.permissionsFiles.usersOrGroups.assignBtn=Grant Access to Users/Groups
+dataverse.permissionsFiles.usersOrGroups.description=All the users and groups that have access to restricted files in this dataset.
+dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation)
+dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID
+dataverse.permissionsFiles.usersOrGroups.tabHeader.email=Email
+dataverse.permissionsFiles.usersOrGroups.tabHeader.authentication=Authentication
+dataverse.permissionsFiles.usersOrGroups.tabHeader.files=Files
+dataverse.permissionsFiles.usersOrGroups.tabHeader.access=Access
+dataverse.permissionsFiles.usersOrGroups.file=File
+dataverse.permissionsFiles.usersOrGroups.files=Files
+dataverse.permissionsFiles.usersOrGroups.invalidMsg=There are no users or groups with access to the restricted files in this dataset.
+dataverse.permissionsFiles.files=Restricted Files
+dataverse.permissionsFiles.files.label={0, choice, 0#Restricted Files|1#Restricted File|2#Restricted Files}
+dataverse.permissionsFiles.files.description=All the restricted files in this dataset.
+dataverse.permissionsFiles.files.tabHeader.fileName=File Name +dataverse.permissionsFiles.files.tabHeader.roleAssignees=Users/Groups +dataverse.permissionsFiles.files.tabHeader.access=Access +dataverse.permissionsFiles.files.tabHeader.publishedRestrictedState=Published +dataverse.permissionsFiles.files.tabHeader.draftRestrictedState=Draft +dataverse.permissionsFiles.files.deleted=Deleted +dataverse.permissionsFiles.files.public=Public +dataverse.permissionsFiles.files.restricted=Restricted +dataverse.permissionsFiles.files.roleAssignee=User/Group +dataverse.permissionsFiles.files.roleAssignees=Users/Groups +dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Users/Groups|1#User/Group|2#Users/Groups} +dataverse.permissionsFiles.files.assignBtn=Assign Access +dataverse.permissionsFiles.files.invalidMsg=There are no restricted files in this dataset. +dataverse.permissionsFiles.files.requested=Requested Files +dataverse.permissionsFiles.files.selected=Selecting {0} of {1} {2} +dataverse.permissionsFiles.files.includeDeleted=Include Deleted Files +dataverse.permissionsFiles.viewRemoveDialog.header=File Access +dataverse.permissionsFiles.viewRemoveDialog.removeBtn=Remove Access +dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=Are you sure you want to remove access to this file? Once access has been removed, the user or group will no longer be able to download this file. +dataverse.permissionsFiles.assignDialog.header=Grant File Access +dataverse.permissionsFiles.assignDialog.description=Grant file access to users and groups. +dataverse.permissionsFiles.assignDialog.userOrGroup=Users/Groups +dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=Enter User/Group Name +dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=No matches found. +dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group. +dataverse.permissionsFiles.assignDialog.fileName=File Name +dataverse.permissionsFiles.assignDialog.grantBtn=Grant +dataverse.permissionsFiles.assignDialog.rejectBtn=Reject + +# permissions-configure.xhtml +dataverse.permissions.accessDialog.header=Edit Access +dataverse.permissions.description=Current access configuration to your dataverse. +dataverse.permissions.tip=Select if all users or only certain users are able to add to this dataverse, by clicking the Edit Access button. +dataverse.permissions.Q1=Who can add to this dataverse? +dataverse.permissions.Q1.answer1=Anyone adding to this dataverse needs to be given access +dataverse.permissions.Q1.answer2=Anyone with a Dataverse account can add sub dataverses +dataverse.permissions.Q1.answer3=Anyone with a Dataverse account can add datasets +dataverse.permissions.Q1.answer4=Anyone with a Dataverse account can add sub dataverses and datasets +dataverse.permissions.Q2=When a user adds a new dataset to this dataverse, which role should be automatically assigned to them on that dataset? 
+dataverse.permissions.Q2.answer.editor.description=- Edit metadata, upload and edit files, edit Terms and Guestbook, and submit datasets for review
+dataverse.permissions.Q2.answer.manager.description=- Edit metadata, upload and edit files, edit Terms and Guestbook, and set File Restrictions (Files Access + Use)
+dataverse.permissions.Q2.answer.curator.description=- Edit metadata, upload and edit files, edit Terms and Guestbook, set File Restrictions (Files Access + Use), edit Permissions/Assign Roles, and Publish
+permission.anyoneWithAccount=Anyone with a Dataverse account
+
+# roles-assign.xhtml
+dataverse.permissions.usersOrGroups.assignDialog.header=Assign Role
+dataverse.permissions.usersOrGroups.assignDialog.description=Grant permissions to users and groups by assigning them a role.
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Users/Groups
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Enter User/Group Name
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=No matches found.
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group.
+dataverse.permissions.usersOrGroups.assignDialog.role.description=These are the permissions associated with the selected role.
+dataverse.permissions.usersOrGroups.assignDialog.role.warning=Assigning the {0} role means the user(s) will also have the {0} role applied to all {1} within this {2}.
+dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Please select a role to assign.
+
+# roles-edit.xhtml
+dataverse.permissions.roles.header=Edit Role
+dataverse.permissions.roles.name=Role Name
+dataverse.permissions.roles.name.title=Enter a name for the role.
+dataverse.permissions.roles.id=Identifier
+dataverse.permissions.roles.id.title=Enter a name for the alias.
+dataverse.permissions.roles.description.title=Describe the role (1000 characters max).
+dataverse.permissions.roles.description.counter={0} characters remaining
+dataverse.permissions.roles.roleList.header=Role Permissions
+dataverse.permissions.roles.roleList.authorizedUserOnly=Permissions with an asterisk icon indicate actions that can be performed by users not logged into Dataverse.
+
+# explicitGroup-new-dialog.xhtml
+dataverse.permissions.explicitGroupEditDialog.title.new=Create Group
+dataverse.permissions.explicitGroupEditDialog.title.edit=Edit Group {0}
+dataverse.permissions.explicitGroupEditDialog.help=Add users or other groups to this group.
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier=Group Identifier
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip=Short name used for the ID of this group.
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=Group identifier cannot be empty
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=Group identifier can contain only letters, digits, underscores (_) and dashes (-)
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=Consists of letters, digits, underscores (_) and dashes (-)
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.taken=Group identifier already used in this dataverse
+dataverse.permissions.explicitGroupEditDialog.groupName=Group Name
+dataverse.permissions.explicitGroupEditDialog.groupName.required=Group name cannot be empty
+dataverse.permissions.explicitGroupEditDialog.groupDescription=Description
+dataverse.permissions.explicitGroupEditDialog.roleAssigneeName=User/Group
+dataverse.permissions.explicitGroupEditDialog.roleAssigneeNames=Users/Groups
+dataverse.permissions.explicitGroupEditDialog.createGroup=Create Group
+
+# manage-templates.xhtml
+dataset.manageTemplates.pageTitle=Manage Dataset Templates
+dataset.manageTemplates.select.txt=Include Templates from {0}
+dataset.manageTemplates.createBtn=Create Dataset Template
+dataset.manageTemplates.saveNewTerms=Save Dataset Template
+dataset.manageTemplates.noTemplates.why.header=Why Use Templates?
+dataset.manageTemplates.noTemplates.why.reason1=Templates are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in.
+dataset.manageTemplates.noTemplates.why.reason2=Templates can be used to input instructions for those uploading datasets into your dataverse if you have a specific way you want a metadata field to be filled out.
+dataset.manageTemplates.noTemplates.how.header=How To Use Templates
+dataset.manageTemplates.noTemplates.how.tip1=Templates are created at the dataverse level, can be deleted (so they do not show for future datasets), set to default (not required), and can be copied so you do not have to start over when creating a new template with similar metadata from another template. When a template is deleted, it does not impact the datasets that have used the template already.
+dataset.manageTemplates.noTemplates.how.tip2=Please note that the ability to choose which metadata fields are hidden, required, or optional is done on the General Information page for this dataverse.
+dataset.manageTemplates.noTemplates.getStarted=To get started, click on the Create Dataset Template button above. To learn more about templates, visit the Dataset Templates section of the User Guide.
+dataset.manageTemplates.tab.header.templte=Template Name
+dataset.manageTemplates.tab.header.date=Date Created
+dataset.manageTemplates.tab.header.usage=Usage
+dataset.manageTemplates.tab.header.action=Action
+dataset.manageTemplates.tab.action.btn.makeDefault=Make Default
+dataset.manageTemplates.tab.action.btn.default=Default
+dataset.manageTemplates.tab.action.btn.view=View
+dataset.manageTemplates.tab.action.btn.copy=Copy
+dataset.manageTemplates.tab.action.btn.edit=Edit
+dataset.manageTemplates.tab.action.btn.edit.metadata=Metadata
+dataset.manageTemplates.tab.action.btn.edit.terms=Terms
+dataset.manageTemplates.tab.action.btn.delete=Delete
+dataset.manageTemplates.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this template? A new dataset will not be able to use this template.
+dataset.manageTemplates.tab.action.btn.delete.dialog.header=Delete Template
+dataset.manageTemplates.tab.action.btn.view.dialog.header=Dataset Template Preview
+dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate=Dataset Template
+dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate.title=The dataset template which prepopulates info into the form automatically.
+dataset.manageTemplates.tab.action.noedit.createdin=Template created at {0}
+dataset.manageTemplates.delete.usedAsDefault=This template is the default template for the following dataverse(s). It will be removed as default as well.
+dataset.message.manageTemplates.label=Manage Dataset Templates
+dataset.message.manageTemplates.message=Create a template prefilled with standard metadata field values, such as Author Affiliation, or add instructions in the metadata fields to give depositors more information on what metadata is expected.
+
+# metadataFragment.xhtml
+dataset.anonymized.withheld=withheld
+
+# template.xhtml
+dataset.template.name.tip=The name of the dataset template.
+dataset.template.returnBtn=Return to Manage Templates
+dataset.template.name.title=Enter a unique name for the template.
+template.asterisk.tip=Asterisks indicate metadata fields that users will be required to fill out while adding a dataset to this dataverse.
+dataset.template.popup.create.title=Create Template
+dataset.template.popup.create.text=Do you want to add default Terms of Use and/or Access?
+dataset.create.add.terms=Save + Add Terms
+
+# manage-groups.xhtml
+dataverse.manageGroups.pageTitle=Manage Dataverse Groups
+dataverse.manageGroups.createBtn=Create Group
+dataverse.manageGroups.noGroups.why.header=Why Use Groups?
+dataverse.manageGroups.noGroups.why.reason1=Groups allow you to assign roles and permissions for many users at once.
+dataverse.manageGroups.noGroups.why.reason2=You can use groups to manage multiple different kinds of users (students, collaborators, etc.).
+dataverse.manageGroups.noGroups.how.header=How To Use Groups
+dataverse.manageGroups.noGroups.how.tip1=A group can contain both users and other groups.
+dataverse.manageGroups.noGroups.how.tip2=You can assign permissions to a group in the "Permissions" view.
+dataverse.manageGroups.noGroups.getStarted=To get started, click on the Create Group button above.
+dataverse.manageGroups.tab.header.name=Group Name
+dataverse.manageGroups.tab.header.id=Group ID
+dataverse.manageGroups.tab.header.membership=Membership
+dataverse.manageGroups.tab.header.action=Action
+dataverse.manageGroups.tab.action.btn.view=View
+dataverse.manageGroups.tab.action.btn.copy=Copy
+dataverse.manageGroups.tab.action.btn.enable=Enable
+dataverse.manageGroups.tab.action.btn.disable=Disable
+dataverse.manageGroups.tab.action.btn.edit=Edit
+dataverse.manageGroups.tab.action.btn.viewCollectedData=View Collected Data
+dataverse.manageGroups.tab.action.btn.delete=Delete
+dataverse.manageGroups.tab.action.btn.delete.dialog.header=Delete Group
+dataverse.manageGroups.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this group? You cannot undelete a group.
+dataverse.manageGroups.tab.action.btn.view.dialog.header=Dataverse Group +dataverse.manageGroups.tab.action.btn.view.dialog.group=Group Name +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.name=Member Name +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.type=Member Type +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.action=Action +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.delete=Delete +dataverse.manageGroups.tab.action.btn.view.dialog.groupMembers=Group Members +dataverse.manageGroups.tab.action.btn.view.dialog.enterName=Enter User/Group Name +dataverse.manageGroups.tab.action.btn.view.dialog.invalidMsg=No matches found. + +# manage-guestbooks.xhtml +dataset.manageGuestbooks.pageTitle=Manage Dataset Guestbooks +dataset.manageGuestbooks.include=Include Guestbooks from {0} +dataset.manageGuestbooks.createBtn=Create Dataset Guestbook +dataset.manageGuestbooks.download.all.responses=Download All Responses +dataset.manageGuestbooks.download.responses=Download Responses +dataset.manageGuestbooks.noGuestbooks.why.header=Why Use Guestbooks? +dataset.manageGuestbooks.noGuestbooks.why.reason1=Guestbooks allow you to collect data about who is downloading the files from your datasets. You can decide to collect account information (username, given name & last name, affiliation, etc.) as well as create custom questions (e.g., What do you plan to use this data for?). +dataset.manageGuestbooks.noGuestbooks.why.reason2=You can download the data collected from the enabled guestbooks to be able to store it outside of Dataverse. +dataset.manageGuestbooks.noGuestbooks.how.header=How To Use Guestbooks +dataset.manageGuestbooks.noGuestbooks.how.tip1=A guestbook can be used for multiple datasets but only one guestbook can be used for a dataset. +dataset.manageGuestbooks.noGuestbooks.how.tip2=Custom questions can have free form text answers or have a user select an answer from several options. +dataset.manageGuestbooks.noGuestbooks.getStarted=To get started, click on the Create Dataset Guestbook button above. To learn more about Guestbooks, visit the Dataset Guestbook section of the User Guide. +dataset.manageGuestbooks.tab.header.name=Guestbook Name +dataset.manageGuestbooks.tab.header.date=Date Created +dataset.manageGuestbooks.tab.header.usage=Usage +dataset.manageGuestbooks.tab.header.responses=Responses +dataset.manageGuestbooks.tab.header.action=Action +dataset.manageGuestbooks.tab.action.btn.view=Preview +dataset.manageGuestbooks.tab.action.btn.copy=Copy +dataset.manageGuestbooks.tab.action.btn.enable=Enable +dataset.manageGuestbooks.tab.action.btn.disable=Disable +dataset.manageGuestbooks.tab.action.btn.edit=Edit +dataset.manageGuestbooks.tab.action.btn.preview=Preview +dataset.manageGuestbooks.tab.action.btn.viewCollectedData=View Responses +dataset.manageGuestbooks.tab.action.btn.delete=Delete +dataset.manageGuestbooks.tab.action.btn.delete.dialog.header=Delete Guestbook +dataset.manageGuestbooks.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this guestbook? You cannot undelete a guestbook. +dataset.manageGuestbooks.tab.action.btn.view.dialog.header=Preview Guestbook +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook.title=Upon downloading files the guestbook asks for the following information. 
+dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook=Guestbook Name +dataset.manageGuestbooks.tab.action.btn.viewCollectedData.dialog.header=Dataset Guestbook Collected Data +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData.title=User data collected by the guestbook. +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData=Collected Data +dataset.manageGuestbooks.tab.action.noedit.createdin=Guestbook created at {0} +dataset.manageGuestbooks.message.deleteSuccess=The guestbook has been deleted. +dataset.manageGuestbooks.message.deleteFailure=The guestbook cannot be deleted. +dataset.manageGuestbooks.message.editSuccess=The guestbook has been updated. +dataset.manageGuestbooks.message.editFailure=The guestbook could not be updated. +dataset.manageGuestbooks.message.enableSuccess=The guestbook has been enabled. +dataset.manageGuestbooks.message.enableFailure=The guestbook could not be enabled. +dataset.manageGuestbooks.message.disableSuccess=The guestbook has been disabled. +dataset.manageGuestbooks.message.disableFailure=The guestbook could not be disabled. +dataset.manageGuestbooks.tip.title=Manage Dataset Guestbooks +dataset.manageGuestbooks.tip.downloadascsv=Click \"Download All Responses\" to download all collected guestbook responses for this dataverse, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. +dataset.guestbooksResponses.dataset=Dataset +dataset.guestbooksResponses.date=Date +dataset.guestbooksResponses.type=Type +dataset.guestbooksResponses.file=File +dataset.guestbooksResponses.customQuestions=Custom Questions +dataset.guestbooksResponses.user=User +dataset.guestbooksResponses.tip.title=Guestbook Responses +dataset.guestbooksResponses.count.responses={0} {0, choice, 0#Responses|1#Response|2#Responses} +dataset.guestbooksResponses.count.toofresults={0} to {1} of {2} {2, choice, 0#Responses|1#Response|2#Responses} +dataset.guestbooksResponses.tip.downloadascsv=Click \"Download Responses\" to download all collected responses for this guestbook, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. +dataset.guestbooksResponses.tooManyResponses.message=Note: this guestbook has too many responses to display on this page. Only the most recent {0} responses are shown below. Click \"Download Responses\" to download all collected responses ({1} total) as a CSV file. + +# guestbook-responses.xhtml +dataset.guestbookResponses.pageTitle=Guestbook Responses + +# guestbook.xhtml +dataset.manageGuestbooks.guestbook.name=Guestbook Name +dataset.manageGuestbooks.guestbook.name.tip=Enter a unique name for this Guestbook. +dataset.manageGuestbooks.guestbook.dataCollected=Data Collected +dataset.manageGuestbooks.guestbook.dataCollected.description=Dataverse account information that will be collected when a user downloads a file. Check the ones that will be required. +dataset.manageGuestbooks.guestbook.customQuestions=Custom Questions +dataset.manageGuestbooks.guestbook.accountInformation=Account Information +dataset.manageGuestbooks.guestbook.required=(Required) +dataset.manageGuestbooks.guestbook.optional=(Optional) +dataset.manageGuestbooks.guestbook.customQuestions.description=Create your own questions to have users provide more than their account information when they download a file. 
Questions can be required or optional and answers can be text or multiple choice. +dataset.manageGuestbooks.guestbook.customQuestions.questionType=Question Type +dataset.manageGuestbooks.guestbook.customQuestions.questionText=Question Text +dataset.manageGuestbooks.guestbook.customQuestions.responseOptions=Response Options +dataset.manageGuestbooks.guestbook.customQuestions.questionType.text=Text +dataset.manageGuestbooks.guestbook.customQuestions.questionType.multiple=Multiple Choice + +# guestbookResponseFragment.xhtml +dataset.guestbookResponse.guestbook.additionalQuestions=Additional Questions +dataset.guestbookResponse.showPreview.errorMessage=Can't show preview. +dataset.guestbookResponse.showPreview.errorDetail=Couldn't write guestbook response. + +# dataset.xhtml +dataset.configureBtn=Configure +dataset.pageTitle=Add New Dataset + +dataset.accessBtn=Access Dataset +dataset.accessBtn.header.download=Download Options +dataset.accessBtn.header.explore=Explore Options +dataset.accessBtn.header.compute=Compute Options +dataset.accessBtn.download.size=ZIP ({0}) +dataset.accessBtn.too.big=The dataset is too large to download. Please select the files you need from the files table. +dataset.accessBtn.original.too.big=The dataset is too large to download in the original format. Please select the files you need from the files table. +dataset.accessBtn.archival.too.big=The dataset is too large to download in the archival format. Please select the files you need from the files table. +dataset.linkBtn=Link Dataset +dataset.contactBtn=Contact Owner +dataset.shareBtn=Share + +dataset.publishBtn=Publish Dataset +dataset.editBtn=Edit Dataset + +dataset.editBtn.itemLabel.upload=Files (Upload) +dataset.editBtn.itemLabel.metadata=Metadata +dataset.editBtn.itemLabel.terms=Terms +dataset.editBtn.itemLabel.permissions=Permissions +dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets +dataset.editBtn.itemLabel.privateUrl=Private URL +dataset.editBtn.itemLabel.permissionsDataset=Dataset +dataset.editBtn.itemLabel.permissionsFile=Restricted Files +dataset.editBtn.itemLabel.deleteDataset=Delete Dataset +dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version +dataset.editBtn.itemLabel.deaccession=Deaccession Dataset +dataset.exportBtn=Export Metadata +dataset.exportBtn.itemLabel.ddi=DDI +dataset.exportBtn.itemLabel.dublinCore=Dublin Core +dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD +dataset.exportBtn.itemLabel.datacite=DataCite +dataset.exportBtn.itemLabel.json=JSON +dataset.exportBtn.itemLabel.oai_ore=OAI_ORE +dataset.exportBtn.itemLabel.dataciteOpenAIRE=OpenAIRE +dataset.exportBtn.itemLabel.html=DDI HTML Codebook +metrics.title=Metrics +metrics.title.tip=View more metrics information +metrics.dataset.title=Dataset Metrics +metrics.dataset.tip.default=Aggregated metrics for this dataset. +metrics.dataset.tip.makedatacount=Metrics collected using Make Data Count standards. +metrics.dataset.views.tip=Dataset views are combined with both aggregated file views and file downloads. +metrics.dataset.downloads.default.tip=Total aggregated downloads of files in this dataset. +metrics.dataset.downloads.makedatacount.tip=Each file downloaded is counted as 1, and added to the total download count. +metrics.dataset.citations.tip=Click for a list of citation URLs. +metrics.file.title=File Metrics +metrics.file.tip.default=Metrics for this individual file. 
+metrics.file.tip.makedatacount=Individual file downloads are tracked in Dataverse but are not reported as part of the Make Data Count standard.
+metrics.file.downloads.tip=Total downloads of this file.
+metrics.views={0, choice, 0#Views|1#View|2#Views}
+metrics.downloads={0, choice, 0#Downloads|1#Download|2#Downloads}
+metrics.citations={0, choice, 0#Citations|1#Citation|2#Citations}
+metrics.citations.dialog.header=Dataset Citations
+metrics.citations.dialog.help=Citations for this dataset are retrieved from Crossref via DataCite using Make Data Count standards. For more information about dataset metrics, please refer to the User Guide.
+metrics.citations.dialog.empty=Sorry, no citations were found.
+dataset.publish.btn=Publish
+dataset.publish.header=Publish Dataset
+dataset.rejectBtn=Return to Author
+dataset.submitBtn=Submit for Review
+dataset.disabledSubmittedBtn=Submitted for Review
+dataset.submitMessage=You will not be able to make changes to this dataset while it is in review.
+dataset.submit.success=Your dataset has been submitted for review.
+dataset.inreview.infoMessage=The draft version of this dataset is currently under review prior to publication.
+dataset.submit.failure=Dataset Submission Failed - {0}
+dataset.submit.failure.null=Can't submit for review. Dataset is null.
+dataset.submit.failure.isReleased=Latest version of dataset is already released. Only draft versions can be submitted for review.
+dataset.submit.failure.inReview=You cannot submit this dataset for review because it is already in review.
+dataset.rejectMessage=Return this dataset to contributor for modification.
+dataset.rejectMessage.label=Return to Author Reason
+dataset.rejectWatermark=Please enter a reason for returning this dataset to its author(s).
+dataset.reject.enterReason.error=Reason for return to author is required.
+dataset.reject.success=This dataset has been sent back to the contributor.
+dataset.reject.failure=Dataset Submission Return Failed - {0}
+dataset.reject.datasetNull=Cannot return the dataset to the author(s) because it is null.
+dataset.reject.datasetNotInReview=This dataset cannot be returned to the author(s) because the latest version is not In Review. The author(s) needs to click Submit for Review first.
+dataset.publish.tip=Are you sure you want to publish this dataset? Once you do so it must remain published.
+dataset.publishBoth.tip=Once you publish this dataset it must remain published.
+dataset.unregistered.tip= This dataset is unregistered. We will attempt to register it before publishing.
+dataset.republish.tip=Are you sure you want to republish this dataset?
+dataset.selectVersionNumber=Select if this is a minor or major version update.
+dataset.updateRelease=Update Current Version (will permanently overwrite the latest published version)
+dataset.majorRelease=Major Release
+dataset.minorRelease=Minor Release
+dataset.majorRelease.tip=Due to the nature of changes to the current draft this will be a major release ({0})
+dataset.mayNotBePublished=Cannot publish dataset.
+dataset.mayNotPublish.administrator= This dataset cannot be published until {0} is published by its administrator.
+dataset.mayNotPublish.both= This dataset cannot be published until {0} is published. Would you like to publish both right now?
+dataset.mayNotPublish.twoGenerations= This dataset cannot be published until {0} and {1} are published.
+dataset.mayNotBePublished.both.button=Yes, Publish Both +dataset.viewVersion.unpublished=View Unpublished Version +dataset.viewVersion.published=View Published Version +dataset.link.title=Link Dataset +dataset.link.save=Save Linked Dataset +dataset.link.not.to.owner=Can't link a dataset to its dataverse +dataset.link.not.to.parent.dataverse=Can't link a dataset to its parent dataverses +dataset.link.not.published=Can't link a dataset that has not been published +dataset.link.not.available=Can't link a dataset that has not been published or is not harvested +dataset.link.not.already.linked=Can't link a dataset that has already been linked to this dataverse +dataset.email.datasetContactTitle=Contact Dataset Owner +dataset.email.hiddenMessage= +dataset.email.messageSubject=Test Message Subject +dataset.email.datasetLinkBtn.tip=Link Dataset to Your Dataverse +dataset.share.datasetShare=Share Dataset +dataset.share.datasetShare.tip=Share this dataset on your favorite social media networks. +dataset.share.datasetShare.shareText=View this dataset. +dataset.locked.message=Dataset Locked +dataset.locked.message.details=This dataset is locked until publication. +dataset.locked.inReview.message=Submitted for Review +dataset.locked.ingest.message=The tabular data files uploaded are being processed and converted into the archival format +dataset.unlocked.ingest.message=The tabular files have been ingested. +dataset.locked.editInProgress.message=Edit In Progress +dataset.locked.editInProgress.message.details=Additional edits cannot be made at this time. Contact {0} if this status persists. +dataset.locked.pidNotReserved.message=Dataset DOI Not Reserved +dataset.locked.pidNotReserved.message.details=The DOI displayed in the citation for this dataset has not yet been reserved with DataCite. Please do not share this DOI until it has been reserved. +dataset.publish.error=This dataset may not be published due to an error when contacting the {0} Service. Please try again. +dataset.publish.error.doi=This dataset may not be published because the DOI update failed. +dataset.publish.file.validation.error.message=Failed to Publish Dataset +dataset.publish.file.validation.error.details=The dataset could not be published because one or more of the datafiles in the dataset could not be validated (physical file missing, checksum mismatch, etc.) +dataset.publish.file.validation.error.contactSupport=The dataset could not be published because one or more of the datafiles in the dataset could not be validated (physical file missing, checksum mismatch, etc.) Please contact support for further assistance. +dataset.publish.file.validation.error.noChecksumType=Checksum type not defined for datafile id {0} +dataset.publish.file.validation.error.failRead=Failed to open datafile id {0} for reading +dataset.publish.file.validation.error.failCalculateChecksum=Failed to calculate checksum for datafile id {0} +dataset.publish.file.validation.error.wrongChecksumValue=Checksum mismatch for datafile id {0} +dataset.compute.computeBatchSingle=Compute Dataset +dataset.compute.computeBatchList=List Batch +dataset.compute.computeBatchAdd=Add to Batch +dataset.compute.computeBatchClear=Clear Batch +dataset.compute.computeBatchRemove=Remove from Batch +dataset.compute.computeBatchCompute=Compute Batch +dataset.compute.computeBatch.success=The list of datasets in your compute batch has been updated. +dataset.compute.computeBatch.failure=The list of datasets in your compute batch failed to be updated. Please try again. 
+dataset.compute.computeBtn=Compute +dataset.compute.computeBatchListHeader=Compute Batch +dataset.compute.computeBatchRestricted=This dataset contains restricted files you may not compute on because you have not been granted access. +dataset.delete.error=Could not deaccession the dataset because the {0} update failed. +dataset.publish.workflow.message=Publish in Progress +dataset.publish.workflow.inprogress=This dataset is locked until publication. +dataset.pidRegister.workflow.inprogress=The dataset is locked while the persistent identifiers are being registered or updated, and/or the physical files are being validated. +dataset.versionUI.draft=Draft +dataset.versionUI.inReview=In Review +dataset.versionUI.unpublished=Unpublished +dataset.versionUI.deaccessioned=Deaccessioned +dataset.cite.title.released=DRAFT VERSION will be replaced in the citation with V1 once the dataset has been published. +dataset.cite.title.draft=DRAFT VERSION will be replaced in the citation with the selected version once the dataset has been published. +dataset.cite.title.deassessioned=DEACCESSIONED VERSION has been added to the citation for this version since it is no longer available. +dataset.cite.standards.tip=Learn about Data Citation Standards. +dataset.cite.downloadBtn=Cite Dataset +dataset.cite.downloadBtn.xml=EndNote XML +dataset.cite.downloadBtn.ris=RIS +dataset.cite.downloadBtn.bib=BibTeX +dataset.create.authenticatedUsersOnly=Only authenticated users can create datasets. +dataset.deaccession.reason=Deaccession Reason +dataset.beAccessedAt=The dataset can now be accessed at: +dataset.descriptionDisplay.title=Description +dataset.keywordDisplay.title=Keyword +dataset.subjectDisplay.title=Subject +dataset.contact.tip=Use email button above to contact. +dataset.asterisk.tip=Asterisks indicate required fields +dataset.message.uploadFiles.label=Upload Dataset Files +dataset.message.uploadFilesSingle.message=For more information about supported file formats, please refer to the User Guide. +dataset.message.uploadFilesMultiple.message=Multiple file upload/download methods are available for this dataset. Once you upload a file using one of these methods, your choice will be locked in for this dataset. +dataset.message.editMetadata.label=Edit Dataset Metadata +dataset.message.editMetadata.message=Add more metadata about this dataset to help others easily find it. +dataset.message.editMetadata.duplicateFilenames=Duplicate filenames: {0} +dataset.message.editTerms.label=Edit Dataset Terms +dataset.message.editTerms.message=Add the terms of use for this dataset to explain how to access and use your data. +dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock. +dataset.message.locked.downloadNotAllowedInReview=Dataset file(s) may not be downloaded due to In Review dataset lock. +dataset.message.locked.downloadNotAllowed=Dataset file(s) may not be downloaded due to dataset lock. +dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lock. +dataset.message.locked.publishNotAllowed=Dataset cannot be published due to dataset lock. +dataset.message.createSuccess=This dataset has been created. +dataset.message.createSuccess.failedToSaveFiles=Partial Success: The dataset has been created. But the file(s) could not be saved. Please try uploading the file(s) again. +dataset.message.createSuccess.partialSuccessSavingFiles=Partial Success: The dataset has been created. But only {0} out of {1} files have been saved. 
Please try uploading the missing file(s) again. +dataset.message.linkSuccess= {0} has been successfully linked to {1}. +dataset.message.metadataSuccess=The metadata for this dataset has been updated. +dataset.message.termsSuccess=The terms for this dataset have been updated. +dataset.message.filesSuccess=The files for this dataset have been updated. +dataset.message.addFiles.Failure=Failed to add files to the dataset. Please try uploading the file(s) again. +dataset.message.addFiles.partialSuccess=Partial success: only {0} files out of {1} have been saved. Please try uploading the missing file(s) again. +dataset.message.publish.remind.draft=If it's ready for sharing, please publish it. +dataset.message.submit.remind.draft=If it's ready for sharing, please submit it for review. +dataset.message.publish.remind.version=If it's ready for sharing, please publish it so that others can see these changes. +dataset.message.submit.remind.version=If it's ready for sharing, please submit it for review so that others can see these changes. +dataset.message.publishSuccess=This dataset has been published. +dataset.message.only.authenticatedUsers=Only authenticated users may release Datasets. +dataset.message.deleteSuccess=This dataset has been deleted. +dataset.message.bulkFileUpdateSuccess=The selected files have been updated. +dataset.message.bulkFileDeleteSuccess=The selected files have been deleted. +datasetVersion.message.deleteSuccess=This dataset draft has been deleted. +datasetVersion.message.deaccessionSuccess=The selected version(s) have been deaccessioned. +dataset.message.deaccessionSuccess=This dataset has been deaccessioned. +dataset.message.publishFailure=The dataset could not be published. +dataset.message.metadataFailure=The metadata could not be updated. +dataset.message.filesFailure=The files could not be updated. +dataset.message.bulkFileDeleteFailure=The selected files could not be deleted. +dataset.message.files.ingestFailure=The file(s) could not be ingested. +dataset.message.deleteFailure=This dataset draft could not be deleted. +dataset.message.deaccessionFailure=This dataset could not be deaccessioned. +dataset.message.createFailure=The dataset could not be created. +dataset.message.termsFailure=The dataset terms could not be updated. +dataset.message.label.fileAccess=File Access +dataset.message.publicInstall=Files are stored on a publicly accessible storage server. +dataset.metadata.publicationDate=Publication Date +dataset.metadata.publicationDate.tip=The publication date of a dataset. +dataset.metadata.publicationYear=Publication Year +dataset.metadata.publicationYear.tip=The publication year of a dataset. +dataset.metadata.persistentId=Dataset Persistent ID +dataset.metadata.persistentId.tip=The unique persistent identifier for a dataset, which can be a Handle or DOI in Dataverse. +dataset.metadata.alternativePersistentId=Previous Dataset Persistent ID +dataset.metadata.alternativePersistentId.tip=A previously used persistent identifier for a dataset, which can be a Handle or DOI in Dataverse. +file.metadata.preview=Preview +file.metadata.filetags=File Tags +file.metadata.persistentId=File Persistent ID +file.metadata.persistentId.tip=The unique persistent identifier for a file, which can be a Handle or DOI in Dataverse. 
+dataset.versionDifferences.termsOfUseAccess=Terms of Use and Access +dataset.versionDifferences.termsOfUseAccessChanged=Terms of Use/Access Changed +dataset.versionDifferences.metadataBlock=Metadata Block +dataset.versionDifferences.field=Field +dataset.versionDifferences.changed=Changed +dataset.versionDifferences.from=From +dataset.versionDifferences.to=To +file.viewDiffDialog.fileAccess=Access +dataset.host.tip=Changing the host dataverse will clear any fields you may have entered data into. +dataset.template.tip=Changing the template will clear any fields you may have entered data into. +dataset.noTemplate.label=None +dataset.noSelectedFiles.header=Select File(s) +dataset.noSelectedFiles=Please select one or more files. +dataset.noSelectedFilesForDownload=Please select a file or files to be downloaded. +dataset.noSelectedFilesForRequestAccess=Please select a file or files for access request. +dataset.inValidSelectedFilesForDownload=Restricted Files Selected +dataset.noValidSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. +dataset.mixedSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. +dataset.downloadUnrestricted=Click Continue to download the files you have access to download. + +dataset.requestAccessToRestrictedFiles=You may request access to the restricted file(s) by clicking the Request Access button. +dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0} +dataset.privateurl.infoMessageReviewer=This unpublished dataset is being privately shared. You will not be able to access it when logged into your Dataverse account. +dataset.privateurl.header=Unpublished Dataset Private URL +dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide. +dataset.privateurl.absent=Private URL has not been created. +dataset.privateurl.createPrivateUrl=Create Private URL +dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access +dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published +dataset.privateurl.disablePrivateUrl=Disable Private URL +dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL +dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset. +dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets. +dataset.privateurl.roleassigeeTitle=Private URL Enabled +dataset.privateurl.createdSuccess=Success! +dataset.privateurl.full=This Private URL provides full read access to the dataset +dataset.privateurl.anonymized=This Private URL provides access to the anonymized dataset +dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset. +dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}. 
+file.display.label=Change View +file.display.table=Table +file.display.tree=Tree +file.count.label=File Count +file.count.one=1 File +file.count={0} to {1} of {2} {2, choice, 0#Files|1#File|2#Files} +file.count.shown={0} {0, choice, 0#Files Selected|1#File|2#Files} +file.clearSelection=Clear selection. +file.zip.download.exceeds.limit=The overall size of the files selected ({0}) for download exceeds the zip limit of {1}. Please unselect some files to continue. +file.zip.download.exceeds.limit.info=The files selected are too large to download as a ZIP. +file.zip.download.exceeds.limit.detail=You can select individual files that are below the {2} download limit from the files table, or use the Data Access API for programmatic access to the files. +file.zip.download.exceeds.limit.header=Download Options +file.numFilesSelected={0} {0, choice, 0#files are|1#file is|2#files are} currently selected. +file.select.tooltip=Select Files +file.selectAllFiles=Select all {0} files in this dataset. +file.dynamicCounter.filesPerPage=Files Per Page +file.selectToAddBtn=Select Files to Add +file.selectToAdd.tipLimit=File upload limit is {0} per file. +file.selectToAdd.tipMoreInformation=Select files or drag and drop into the upload widget. +file.selectToAdd.dragdropMsg=Drag and drop files here. +file.createUploadDisabled=Upload files using rsync via SSH. This method is recommended for large file transfers. The upload script will be available on the Upload Files page once you save this dataset. +file.fromHTTP=Upload with HTTP via your browser +file.fromDropbox=Upload from Dropbox +file.fromDropbox.tip=Select files from Dropbox. +file.fromRsync=Upload with rsync + SSH via Data Capture Module (DCM) +file.api.httpDisabled=File upload via HTTP is not available for this installation of Dataverse. +file.api.alreadyHasPackageFile=File upload via HTTP disabled since this dataset already contains a package file. +file.replace.original=Original File +file.editFiles=Edit Files +file.editFilesSelected=Edit +file.editFile=Edit + +file.actionsBlock=File Actions +file.accessBtn=Access File +file.accessBtn.header.download=Download Options +file.optionsBtn=File Options +file.optionsBtn.header.edit=Edit Options +file.optionsBtn.header.configure=Configure Options +file.editBtn=Edit File +file.contactBtn=Contact Owner +file.shareBtn=Share +file.share.title=Share File +file.share.tip=Share this file on your favorite social media networks. +file.share.text=View this file. +file.bulkUpdate=Bulk Update +file.uploadFiles=Upload Files +file.replaceFile=Replace File +file.notFound.tip=There are no files in this dataset. +file.notFound.search=There are no files that match your search. Please change the search terms and try again. +file.noSelectedFiles.tip=There are no selected files to display. +file.noUploadedFiles.tip=Files you upload will appear here. +file.replace=Replace +file.alreadyDeleted.warning.header=Edit File +file.alreadyDeleted.previous.warningMessage=This file has already been deleted (or replaced) in the current version. It may not be edited. +file.delete=Delete +file.delete.duplicate.multiple=Delete Duplicate Files +file.delete.duplicate.single=Delete Duplicate File +file.metadata=Metadata +file.deleted.success=Files "{0}" will be permanently deleted from this version of this dataset once you click on the Save Changes button. +file.deleted.replacement.success=The replacement file has been deleted. +file.deleted.upload.success.single=File has been deleted and won\u2019t be included in this upload. 
+file.deleted.upload.success.multiple=Files have been deleted and won\u2019t be included in this upload. +file.editAccess=Edit Access +file.restrict=Restrict +file.unrestrict=Unrestrict +file.restricted.success=Files "{0}" will be restricted once you click on the Save Changes button. +file.download.header=Download +file.download.subset.header=Download Data Subset +file.preview=Preview: +file.fileName=File Name +file.type.tabularData=Tabular Data +file.originalChecksumType=Original File {0} +file.checksum.exists.tip=A file with this checksum already exists in the dataset. +file.selectedThumbnail=Thumbnail +file.selectedThumbnail.tip=The thumbnail for this file is used as the default thumbnail for the dataset. Click 'Advanced Options' button of another file to select that file. +file.cloudStorageAccess=Cloud Storage Access +file.cloudStorageAccess.tip=The container name for this dataset needed to access files in cloud storage. +file.cloudStorageAccess.help=To directly access this data in the {2} cloud environment, use the container name in the Cloud Storage Access box below. To learn more about the cloud environment, visit the Cloud Storage Access section of the User Guide. +file.copy=Copy +file.compute=Compute +file.rsyncUpload.info=Upload files using rsync + SSH. This method is recommended for large file transfers. Follow the steps below to upload your data. (User Guide - rsync Upload). +file.rsyncUpload.filesExist=You cannot upload additional files to this dataset. A dataset can only hold one data package. If you need to replace the data package in this dataset, please contact {0}. +file.rsyncUpload.noScriptBroken=The Data Capture Module failed to generate the rsync script. Please contact {0}. +file.rsyncUpload.noScriptBusy=Currently generating rsync script. If the script takes longer than ten minutes to generate, please contact {0}. +file.rsyncUpload.step1=Make sure your data is stored under a single directory. All files within this directory and its subdirectories will be uploaded to your dataset. +file.rsyncUpload.step2=Download this file upload script: +file.rsyncUpload.step2.downloadScriptButton=Download DCM Script +file.rsyncUpload.step3=Open a terminal window in the same directory you saved the script and run this command: bash ./{0} +file.rsyncUpload.step4=Follow the instructions in the script. It will ask for a full path (beginning with "/") to the directory containing your data. Note: this script will expire after 7 days. +file.rsyncUpload.inProgressMessage.summary=File Upload in Progress +file.rsyncUpload.inProgressMessage.details=This dataset is locked while the data files are being transferred and verified. +file.rsyncUpload.httpUploadDisabledDueToRsyncFileExisting=HTTP upload is disabled for this dataset because you have already uploaded files via rsync. If you would like to switch to HTTP upload, please contact {0}. +file.rsyncUpload.httpUploadDisabledDueToRsyncFileExistingAndPublished=HTTP upload is disabled for this dataset because you have already uploaded files via rsync and published the dataset. +file.rsyncUpload.rsyncUploadDisabledDueFileUploadedViaHttp=Upload with rsync + SSH is disabled for this dataset because you have already uploaded files via HTTP. If you would like to switch to rsync upload, then you must first remove all uploaded files from this dataset. Once this dataset is published, the chosen upload method is permanently locked in. 
+file.rsyncUpload.rsyncUploadDisabledDueFileUploadedViaHttpAndPublished=Upload with rsync + SSH is disabled for this dataset because you have already uploaded files via HTTP and published the dataset.
+file.metaData.checksum.copy=Click to copy
+file.metaData.dataFile.dataTab.unf=UNF
+file.metaData.dataFile.dataTab.variables=Variables
+file.metaData.dataFile.dataTab.observations=Observations
+file.metaData.fileAccess=File Access:
+file.addDescription=Add file description...
+file.tags=Tags
+file.editTags=Edit Tags
+file.editTagsDialog.tip=Select existing file tags or create new tags to describe your files. Each file can have more than one tag.
+file.editTagsDialog.select=File Tags
+file.editTagsDialog.selectedTags=Selected Tags
+file.editTagsDialog.selectedTags.none=No tags selected
+file.editTagsDialog.add=Custom File Tag
+file.editTagsDialog.add.tip=Creating a new tag will add it as a tag option for all files in this dataset.
+file.editTagsDialog.newName=Add new file tag...
+dataset.removeUnusedFileTags.label=Delete Tags
+dataset.removeUnusedFileTags.tip=Select to delete Custom File Tags not used by the files in the dataset.
+dataset.removeUnusedFileTags.check=Delete tags not being used
+file.setThumbnail=Set Thumbnail
+file.setThumbnail.header=Set Dataset Thumbnail
+file.datasetThumbnail=Dataset Thumbnail
+file.datasetThumbnail.tip=Select to use this image as the thumbnail image that is displayed in the search results for this dataset.
+file.setThumbnail.confirmation=Are you sure you want to set this image as your dataset thumbnail? There is already an image uploaded to be the thumbnail and this action will remove it.
+file.useThisIamge=Use this image as the dataset thumbnail image
+file.advancedOptions=Advanced Options
+file.advancedIngestOptions=Advanced Ingest Options
+file.assignedDataverseImage.success={0} has been saved as the thumbnail for this dataset.
+file.assignedTabFileTags.success=The tags were successfully added for {0}.
+file.tabularDataTags=Tabular Data Tags
+file.tabularDataTags.tip=Select a tag to describe the type(s) of data this is (survey, time series, geospatial, etc.).
+file.spss-savEncoding=Language Encoding
+file.spss-savEncoding.title=Select the language used for encoding this SPSS (sav) Data file.
+file.spss-savEncoding.current=Current Selection:
+file.spss-porExtraLabels=Variable Labels
+file.spss-porExtraLabels.title=Upload an additional text file with extra variable labels.
+file.spss-porExtraLabels.selectToAddBtn=Select File to Add
+file.ingestFailed.header=Upload Completed with Errors
+file.ingestFailed.message=Tabular data ingest failed.
+file.downloadBtn.format.all=All File Formats + Information
+file.downloadBtn.format.tab=Tab-Delimited
+file.downloadBtn.format.original={0} (Original File Format)
+file.downloadBtn.format.rdata=RData
+file.downloadBtn.format.var=Variable Metadata
+file.downloadBtn.format.citation=Data File Citation
+file.download.filetype.unknown=Original File Format
+file.more.information.link=Link to more file information for
+file.requestAccess=Request Access
+file.requestAccess.dialog.msg=You need to Log In to request access.
+#file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
+#UB
+file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
+
+file.accessRequested=Access Requested
+file.ingestInProgress=Ingest in progress...
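A note for bundle editors: the {0, choice, ...} placeholders used throughout this file (metrics.views, file.count, dataverse.permissionsFiles.files.label, and others) are standard java.text.MessageFormat choice patterns, where each limit#text branch applies to arguments greater than or equal to its limit, so the last branch covers all higher counts. Below is a minimal sketch of how such a pattern resolves, reusing the metrics.views pattern from above; the demo class name is illustrative and not part of this patch.

import java.text.MessageFormat;

public class ChoiceFormatDemo {
    public static void main(String[] args) {
        // Same pattern as metrics.views: 0 -> "Views", 1 -> "View", >=2 -> "Views"
        String pattern = "{0} {0, choice, 0#Views|1#View|2#Views}";
        for (int n : new int[] {0, 1, 5}) {
            System.out.println(MessageFormat.format(pattern, n));
        }
        // Prints: "0 Views", "1 View", "5 Views"
    }
}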
+file.dataFilesTab.metadata.header=Metadata
+file.dataFilesTab.metadata.addBtn=Add + Edit Metadata
+file.dataFilesTab.terms.header=Terms
+file.dataFilesTab.terms.editTermsBtn=Edit Terms Requirements
+file.dataFilesTab.terms.list.termsOfUse.header=Terms of Use
+file.dataFilesTab.terms.list.termsOfUse.waiver=Waiver
+file.dataFilesTab.terms.list.termsOfUse.waiver.title=The waiver informs data downloaders how they can use this dataset.
+file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Public Domain Dedication"
+file.cc0.icon.alttxt=Creative Commons CC0 1.0 Public Domain Dedication icon
+file.dataFilesTab.terms.list.termsOfUse.waiver.description=Datasets will default to a CC0 public domain dedication. CC0 facilitates reuse and extensibility of research data. Our Community Norms as well as good scientific practices expect that proper credit is given via citation. If you are unable to give datasets a CC0 waiver, you may enter custom Terms of Use for datasets.
+file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt=No waiver has been selected for this dataset.
+file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description=Our Community Norms as well as good scientific practices expect that proper credit is given via citation. Please use the data citation above, generated by the Dataverse.
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=Yes, apply CC0 - "Public Domain Dedication"
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=No, do not apply CC0 - "Public Domain Dedication"
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip=This is what end users will see displayed on this dataset
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse=Terms of Use
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title=Outlines how this data can be used once downloaded.
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=If you are unable to use CC0 for datasets, you are able to set custom terms of use. Here is an example of a Data Usage Agreement for datasets that have de-identified human subject data.
+file.dataFilesTab.terms.list.termsOfUse.addInfo=Additional Information
+file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=Confidentiality Declaration
+file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=Indicates whether signing of a confidentiality declaration is needed to access a resource.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=Special Permissions
+file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=Determine if any special permissions are required to access a resource (e.g., if a form is needed and where to access the form).
+file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions=Restrictions
+file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title=Any restrictions on access to or use of the collection, such as privacy certification or distribution restrictions, should be indicated here. These can be restrictions applied by the author, producer, or disseminator of the data collection. If the data are restricted to only a certain class of user, specify which type.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements=Citation Requirements
+file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements.title=Include special/explicit citation requirements for data to be cited properly in articles or other publications that are based on analysis of the data. For standard data citation requirements refer to our Community Norms.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements=Depositor Requirements +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements.title=Information regarding user responsibility for informing Dataset Depositors, Authors or Curators of their use of data through providing citations to the published work or providing copies of the manuscripts. +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Conditions +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=Any additional information that will assist the user in understanding the access and use conditions of the Dataset. +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Disclaimer +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Information regarding responsibility for uses of the Dataset. +file.dataFilesTab.terms.list.termsOfAccess.header=Restricted Files + Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Restricted Files +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=The number of restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=There {0, choice, 0#are|1#is|2#are} {0} restricted {0, choice, 0#files|1#file|2#files} in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Information on how and if users can gain access to the restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Request Access +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=If checked, users can request access to the restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=Users may request access to files. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=Users may not request access to files. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=Enable access request +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace=Data Access Place +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title=If the data is not only in Dataverse, list the location(s) where the data are currently stored. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive=Original Archive +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive.title=Archive from which the data was obtained. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus=Availability Status +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus.title=Statement of Dataset availability. A depositor may need to indicate that a Dataset is unavailable because it is embargoed for a period of time, because it has been superseded, because a new edition is imminent, etc. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess=Contact for Access +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess.title=If different from the Dataset Contact, this is the Contact person or organization (include email or full address, and telephone number if available) that controls access to a collection. 
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection=Size of Collection
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip=Summary of the number of physical files that exist in a Dataset, recording the number of files that contain data and noting whether the collection contains machine-readable documentation and/or other supplementary files and information, such as code, data dictionaries, data definition statements, or data collection instruments.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion=Study Completion
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title=Relationship of the data collected to the amount of data coded and stored in the Dataset. Information as to why certain items of collected information were not included in the dataset or a specific data file should be provided.
+file.dataFilesTab.terms.list.guestbook=Guestbook
+file.dataFilesTab.terms.list.guestbook.title=User information (i.e., name, email, institution, and position) will be collected when files are downloaded.
+file.dataFilesTab.terms.list.guestbook.noSelected.tip=No guestbook is assigned to this dataset; you will not be prompted to provide any information on file download.
+file.dataFilesTab.terms.list.guestbook.noSelected.admin.tip=There are no guestbooks available in {0} to assign to this dataset.
+file.dataFilesTab.terms.list.guestbook.inUse.tip=The following guestbook will prompt a user to provide additional information when downloading a file.
+file.dataFilesTab.terms.list.guestbook.viewBtn=Preview Guestbook
+file.dataFilesTab.terms.list.guestbook.select.tip=Select a guestbook to have a user provide additional information when downloading a file.
+file.dataFilesTab.terms.list.guestbook.noAvailable.tip=There are no guestbooks enabled in {0}. To create a guestbook, return to {0}, click the "Edit" button and select the "Dataset Guestbooks" option.
+file.dataFilesTab.terms.list.guestbook.clearBtn=Clear Selection
+
+file.dataFilesTab.dataAccess=Data Access
+file.dataFilesTab.dataAccess.info=This data file can be accessed through a terminal window, using the commands below. For more information about downloading and verifying data, see our User Guide.
+file.dataFilesTab.dataAccess.info.draft=Data files cannot be accessed until the dataset draft has been published. For more information about downloading and verifying data, see our User Guide.
+file.dataFilesTab.dataAccess.local.label=Local Access
+file.dataFilesTab.dataAccess.download.label=Download Access
+file.dataFilesTab.dataAccess.verify.label=Verify Data
+file.dataFilesTab.dataAccess.local.tooltip=If this data is locally available to you, this is its file path.
+file.dataFilesTab.dataAccess.download.tooltip=Download this data from your preferred mirror by running this command.
+file.dataFilesTab.dataAccess.verify.tooltip=This command runs a checksum to verify the integrity of the data you have downloaded.
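+# Note: strings such as file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt
+# above use java.text.MessageFormat choice syntax, so a single string pluralizes
+# correctly for any count. A minimal, illustrative Java sketch (the pattern is
+# copied from that key):
+#
+#   import java.text.MessageFormat;
+#   String p = "There {0, choice, 0#are|1#is|2#are} {0} restricted "
+#           + "{0, choice, 0#files|1#file|2#files} in this dataset.";
+#   MessageFormat.format(p, 1); // "There is 1 restricted file in this dataset."
+#   MessageFormat.format(p, 2); // "There are 2 restricted files in this dataset."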
+file.dataFilesTab.button.direct=Direct + +file.dataFilesTab.versions=Versions +file.dataFilesTab.versions.headers.dataset=Dataset +file.dataFilesTab.versions.headers.summary=Summary +file.dataFilesTab.versions.headers.contributors=Contributors +file.dataFilesTab.versions.headers.contributors.withheld=Contributor name(s) withheld +file.dataFilesTab.versions.headers.published=Published +file.dataFilesTab.versions.viewDiffBtn=View Differences +file.dataFilesTab.versions.citationMetadata=Citation Metadata: +file.dataFilesTab.versions.added=Added +file.dataFilesTab.versions.removed=Removed +file.dataFilesTab.versions.changed=Changed +file.dataFilesTab.versions.replaced=Replaced +file.dataFilesTab.versions.original=Original +file.dataFilesTab.versions.replacment=Replacement +file.dataFilesTab.versions.additionalCitationMetadata=Additional Citation Metadata: +file.dataFilesTab.versions.description.draft=This is a draft version. +file.dataFilesTab.versions.description.deaccessioned=Due to the previous version being deaccessioned, there are no difference notes available for this published version. +file.dataFilesTab.versions.description.firstPublished=This is the first published version. +file.dataFilesTab.versions.description.deaccessionedReason=Deaccessioned Reason: +file.dataFilesTab.versions.description.beAccessedAt=The dataset can now be accessed at: +file.dataFilesTab.versions.viewDetails.btn=View Details +file.dataFilesTab.versions.widget.viewMoreInfo=To view more information about the versions of this dataset, and to edit it if this is your dataset, please visit the full version of this dataset at the {2}. +file.dataFilesTab.versions.preloadmessage=(Loading versions...) +file.previewTab.externalTools.header=Available Previews +file.previewTab.button.label=Preview +file.previewTab.previews.not.available=Public previews are not available for this file. +file.deleteDialog.tip=Are you sure you want to delete this dataset and all of its files? You cannot undelete this dataset. +file.deleteDialog.header=Delete Dataset +file.deleteDraftDialog.tip=Are you sure you want to delete this draft version? Files will be reverted to the most recently published version. You cannot undelete this draft. +file.deleteDraftDialog.header=Delete Draft Version +file.deleteFileDialog.tip=The file(s) will be deleted after you click on the Save Changes button on the bottom of this page. +file.deleteFileDialog.immediate=The file will be deleted after you click on the Delete button. +file.deleteFileDialog.multiple.immediate=The file(s) will be deleted after you click on the Delete button. +file.deleteFileDialog.header=Delete Files +file.deleteFileDialog.failed.tip=Files will not be removed from previously published versions of the dataset. +file.deaccessionDialog.tip=Once you deaccession this dataset it will no longer be viewable by the public. +file.deaccessionDialog.version=Version +file.deaccessionDialog.reason.question1=Which version(s) do you want to deaccession? +file.deaccessionDialog.reason.question2=What is the reason for deaccession? +file.deaccessionDialog.reason.selectItem.identifiable=There is identifiable data in one or more files. +file.deaccessionDialog.reason.selectItem.beRetracted=The research article has been retracted. +file.deaccessionDialog.reason.selectItem.beTransferred=The dataset has been transferred to another repository. +file.deaccessionDialog.reason.selectItem.IRB=IRB request. +file.deaccessionDialog.reason.selectItem.legalIssue=Legal issue or Data Usage Agreement. 
+file.deaccessionDialog.reason.selectItem.notValid=Not a valid dataset.
+file.deaccessionDialog.reason.selectItem.other=Other (Please type the reason in the space provided below)
+file.deaccessionDialog.enterInfo=Please enter additional information about the reason for deaccession.
+file.deaccessionDialog.leaveURL=If applicable, please leave a URL where this dataset can be accessed after deaccessioning.
+file.deaccessionDialog.leaveURL.watermark=Optional dataset site, http://...
+file.deaccessionDialog.deaccession.tip=Are you sure you want to deaccession? The selected version(s) will no longer be viewable by the public.
+file.deaccessionDialog.deaccessionDataset.tip=Are you sure you want to deaccession this dataset? It will no longer be viewable by the public.
+file.deaccessionDialog.dialog.selectVersion.error=Please select version(s) for deaccessioning.
+file.deaccessionDialog.dialog.reason.error=Please select a reason for deaccessioning.
+file.deaccessionDialog.dialog.url.error=Please enter a valid forwarding URL.
+file.deaccessionDialog.dialog.textForReason.error=Please enter text for the reason for deaccessioning.
+file.deaccessionDialog.dialog.limitChar.error=Text for the reason for deaccessioning may be no longer than {0} characters.
+file.viewDiffDialog.header=Version Differences Details
+file.viewDiffDialog.dialog.warning=Please select two versions to view the differences.
+file.viewDiffDialog.notAvailable=N/A
+file.viewDiffDialog.version=Version
+file.viewDiffDialog.lastUpdated=Last Updated
+file.viewDiffDialog.fileID=File ID
+file.viewDiffDialog.fileName=Name
+file.viewDiffDialog.fileType=Type
+file.viewDiffDialog.fileSize=Size
+file.viewDiffDialog.category=Tags
+file.viewDiffDialog.description=Description
+file.viewDiffDialog.provDescription=Provenance Description
+file.viewDiffDialog.fileReplaced=File Replaced
+file.viewDiffDialog.filesReplaced=File(s) Replaced
+file.viewDiffDialog.files.header=Files
+file.viewDiffDialog.msg.draftFound= This is the "DRAFT" version.
+file.viewDiffDialog.msg.draftNotFound=The "DRAFT" version was not found.
+file.viewDiffDialog.msg.versionFound= This is version "{0}".
+file.viewDiffDialog.msg.versionNotFound=Version "{0}" was not found.
+file.metadataTip=Metadata Tip: After adding the dataset, click the Edit Dataset button to add more metadata.
+file.addBtn=Save Dataset
+file.dataset.allFiles=All Files from this Dataset
+file.downloadDialog.header=Dataset Terms
+file.downloadDialog.tip=Please confirm and/or complete the information needed below in order to continue.
+file.requestAccessTermsDialog.tip=Please confirm and/or complete the information needed below in order to request access to files in this dataset.
+file.requestAccess.notAllowed=Requests for access are not accepted on the Dataset.
+file.requestAccess.notAllowed.alreadyHasDownloadPermisssion=User already has permission to download this file. Request Access is invalid.
+
+file.search.placeholder=Search this dataset...
+file.results.filter=Filter by
+file.results.filter.type=File Type:
+file.results.filter.access=Access:
+file.results.filter.tag=File Tag:
+file.results.filter.all=All
+file.results.btn.sort=Sort
+file.results.btn.sort.option.nameAZ=Name (A-Z)
+file.results.btn.sort.option.nameZA=Name (Z-A)
+file.results.btn.sort.option.newest=Newest
+file.results.btn.sort.option.oldest=Oldest
+file.results.btn.sort.option.size=Size
+file.results.btn.sort.option.type=Type
+file.compute.fileAccessDenied=This file is restricted and you may not compute on it because you have not been granted access.
+file.configure.Button=Configure
+
+file.auxfiles.download.header=Download Auxiliary Files
+# These types correspond to the AuxiliaryFile.Type enum.
+file.auxfiles.types.DP=Differentially Private Statistics
+# Add more types here
+file.auxfiles.unspecifiedTypes=Other Auxiliary Files
+
+# dataset-widgets.xhtml
+dataset.widgets.title=Dataset Thumbnail + Widgets
+dataset.widgets.notPublished.why.header=Why Use Widgets?
+dataset.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website.
+dataset.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website.
+dataset.widgets.notPublished.how.header=How To Use Widgets
+dataset.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published.
+dataset.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website.
+dataset.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here.
+dataset.widgets.notPublished.getStarted=To get started, publish your dataset. To learn more about Widgets, visit the Widgets section of the User Guide.
+dataset.widgets.editAdvanced=Edit Advanced Options
+dataset.widgets.editAdvanced.tip=Advanced Options – Additional options for configuring your widget on your personal or project website.
+dataset.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Widgets section of the User Guide.
+dataset.widgets.citation.txt=Dataset Citation
+dataset.widgets.citation.tip=Add a citation for your dataset to your personal or project website.
+dataset.widgets.datasetFull.txt=Dataset
+dataset.widgets.datasetFull.tip=Add a way for visitors on your website to be able to view your datasets, download files, etc.
+dataset.widgets.advanced.popup.header=Widget Advanced Options
+dataset.widgets.advanced.prompt=Forward persistent URLs in your dataset citation to your personal website.
+dataset.widgets.advanced.url.label=Personal Website URL
+dataset.widgets.advanced.url.watermark=http://www.example.com/page-name
+dataset.widgets.advanced.invalid.message=Please enter a valid URL
+dataset.widgets.advanced.success.message=Successfully updated your Personal Website URL
+dataset.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated.
+dataset.thumbnailsAndWidget.breadcrumbs.title=Thumbnail + Widgets
+dataset.thumbnailsAndWidget.thumbnails.title=Thumbnail
+dataset.thumbnailsAndWidget.widgets.title=Widgets
+dataset.thumbnailsAndWidget.thumbnailImage=Thumbnail Image
+dataset.thumbnailsAndWidget.thumbnailImage.title=The logo or image file you wish to display as the thumbnail of this dataset.
+dataset.thumbnailsAndWidget.thumbnailImage.tip=Supported image types are JPG, TIF, or PNG and should be no larger than {0} KB. The maximum display size for an image file as a dataset thumbnail is 48 pixels wide by 48 pixels high.
+dataset.thumbnailsAndWidget.thumbnailImage.default=Default Icon
+dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable=Select Available File
+dataset.thumbnailsAndWidget.thumbnailImage.selectThumbnail=Select Thumbnail
+dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable.title=Select a thumbnail from those available as image data files that belong to your dataset.
+dataset.thumbnailsAndWidget.thumbnailImage.uploadNew=Upload New File +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title=Upload an image file as your dataset thumbnail, which will be stored separately from the data files that belong to your dataset. +dataset.thumbnailsAndWidget.thumbnailImage.upload=Upload Image +dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataset.thumbnailsAndWidget.thumbnailImage.alt=Thumbnail image selected for dataset +dataset.thumbnailsAndWidget.success=Dataset thumbnail updated. +dataset.thumbnailsAndWidget.removeThumbnail=Remove Thumbnail +dataset.thumbnailsAndWidget.removeThumbnail.tip=You are only removing this image as the dataset thumbnail, not removing it from your dataset. To do that, go to the Edit Files page. +dataset.thumbnailsAndWidget.availableThumbnails=Available Thumbnails +dataset.thumbnailsAndWidget.availableThumbnails.tip=Select a thumbnail from the data files that belong to your dataset. Continue back to the Thumbnail + Widgets page to save your changes. + +# file.xhtml +file.share.fileShare=Share File +file.share.fileShare.tip=Share this file on your favorite social media networks. +file.share.fileShare.shareText=View this file. +file.title.label=Title +file.citation.label=Citation +file.citation.notice=This file is part of "{0}". +file.citation.dataset=Dataset Citation +file.citation.datafile=File Citation +file.cite.downloadBtn=Cite Dataset +file.cite.file.downloadBtn=Cite Data File +file.pid.label=File Persistent ID: +file.unf.lable= File UNF: +file.general.metadata.label=General Metadata +file.description.label=Description +file.tags.label=Tags +file.lastupdated.label=Last Updated +file.DatasetVersion=Version + +file.previewTab.tool.open=Open +file.previewTab.header=Preview +file.previewTab.presentation=File Preview Tool +file.previewTab.openBtn=Open in New Window +file.previewTab.exploreBtn={0} on {1} +file.metadataTab.fileMetadata.header=File Metadata +file.metadataTab.fileMetadata.persistentid.label=Data File Persistent ID +file.metadataTab.fileMetadata.downloadUrl.label=Download URL +file.metadataTab.fileMetadata.downloadUrl.info=Use the Download URL in a Wget command or a download manager to avoid interrupted downloads, time outs or other failures. User Guide - Downloading via URL +file.metadataTab.fileMetadata.unf.label=File UNF +file.metadataTab.fileMetadata.size.label=Size +file.metadataTab.fileMetadata.type.label=Type +file.metadataTab.fileMetadata.description.label=Description +file.metadataTab.fileMetadata.publicationDate.label=Publication Date +file.metadataTab.fileMetadata.depositDate.label=Deposit Date +file.metadataTab.fileMetadata.hierarchy.label=File Path +file.metadataTab.fileMetadata.hierarchy.tip=Hierarchical directory structure path used to display file organization and support reproducibility. 
+file.metadataTab.fitsMetadata.header=FITS Metadata
+
+file.versionDifferences.noChanges=No changes associated with this version
+file.versionDifferences.fileNotInVersion=File not included in this version
+file.versionDifferences.actionChanged=Changed
+file.versionDifferences.actionAdded=Added
+file.versionDifferences.actionRemoved=Removed
+file.versionDifferences.actionReplaced=Replaced
+file.versionDifferences.fileMetadataGroupTitle=File Metadata
+file.versionDifferences.fileTagsGroupTitle=File Tags
+file.versionDifferences.descriptionDetailTitle=Description
+file.versionDifferences.provenanceDetailTitle=Provenance
+file.versionDifferences.fileNameDetailTitle=File Name
+file.versionDifferences.fileAccessTitle=File Access
+file.versionDifferences.fileRestricted=Restricted
+file.versionDifferences.fileUnrestricted=Unrestricted
+file.versionDifferences.fileGroupTitle=File
+
+file.anonymized.authorsWithheld=Author name(s) withheld
+# File Ingest
+ingest.csv.invalidHeader=Invalid header row. One of the cells is empty.
+ingest.csv.lineMismatch=Mismatch between line counts in first and final passes! {0} found on first pass, but {1} found on second.
+ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found.
+ingest.csv.nullStream=Stream can't be null.
+
+# editdatafile.xhtml
+
+# editFilesFragment.xhtml
+file.edit.error.file_exceeds_limit=This file exceeds the size limit.
+# File metadata error
+file.metadata.datafiletag.not_tabular=You cannot add Tabular Data Tags to a non-tabular file.
+file.metadata.filedirectory.invalidCharacters=Directory Name cannot contain invalid characters. Valid characters are a-Z, 0-9, '_', '-', '.', '\\', '/' and ' ' (white space).
+
+# File Edit Success
+file.message.editSuccess=The file has been updated.
+file.message.deleteSuccess=The file has been deleted.
+file.message.replaceSuccess=The file has been replaced.
+
+# File Add/Replace operation messages
+file.addreplace.file_size_ok=File size is in range.
+file.addreplace.error.byte_abrev=B
+file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}.
+file.addreplace.error.dataset_is_null=The dataset cannot be null.
+file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null.
+file.addreplace.error.parsing=Error parsing the provided JSON.
+file.addreplace.warning.unzip.failed=Failed to unzip the file. Saving the file as is.
+file.addreplace.warning.unzip.failed.size=A file contained in this zip file exceeds the size limit of {0}. This Dataverse installation will save and display the zipped file, rather than unpacking and displaying files.
+find.dataset.error.dataset_id_is_null=When accessing a dataset based on Persistent ID, a {0} query parameter must be present.
+find.dataset.error.dataset.not.found.persistentId=Dataset with Persistent ID {0} not found.
+find.dataset.error.dataset.not.found.id=Dataset with ID {0} not found.
+find.dataset.error.dataset.not.found.bad.id=Bad dataset ID number: {0}.
+find.datasetlinking.error.not.found.ids=Dataset linking dataverse with dataset ID {0} and dataset linking dataverse ID {1} not found.
+find.datasetlinking.error.not.found.bad.ids=Bad dataset ID number: {0} or dataset linking dataverse ID number: {1}.
+find.dataverselinking.error.not.found.ids=Dataverse linking dataverse with dataverse ID {0} and dataverse linking dataverse ID {1} not found.
+find.dataverselinking.error.not.found.bad.ids=Bad dataverse ID number: {0} or dataverse linking dataverse ID number: {1}.
+find.datafile.error.datafile.not.found.id=File with ID {0} not found.
+find.datafile.error.datafile.not.found.bad.id=Bad file ID number: {0}.
+find.datafile.error.dataset.not.found.persistentId=Datafile with Persistent ID {0} not found.
+find.dataverse.role.error.role.not.found.id=Dataverse Role with ID {0} not found.
+find.dataverse.role.error.role.not.found.bad.id=Bad Dataverse Role ID number: {0}.
+find.dataverse.role.error.role.not.found.alias=Dataverse Role with alias {0} not found.
+find.dataverse.role.error.role.builtin.not.allowed=May not delete Built In Role {0}.
+file.addreplace.error.dataset_id_not_found=There was no dataset found for ID:
+file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset.
+file.addreplace.error.filename_undetermined=The file name cannot be determined.
+file.addreplace.error.file_content_type_undetermined=The file content type cannot be determined.
+file.addreplace.error.file_upload_failed=The file upload failed.
+file.addreplace.warning.duplicate_file=This file has the same content as {0} that is in the dataset.
+file.addreplace.error.duplicate_file.continue=You may delete it if it was not intentional.
+file.addreplace.error.existing_file_to_replace_id_is_null=The ID of the existing file to replace must be provided.
+file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file not found. There was no file found for ID: {0}
+file.addreplace.error.existing_file_to_replace_is_null=The file to replace cannot be null.
+file.addreplace.error.existing_file_to_replace_not_in_dataset=The file to replace does not belong to this dataset.
+file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published dataset. (The file is unpublished or was deleted from a previous version.)
+file.addreplace.content_type.header=File Type Different
+file.addreplace.already_exists.header=Duplicate File Uploaded
+file.addreplace.already_exists.header.multiple=Duplicate Files Uploaded
+file.addreplace.error.replace.new_file_has_different_content_type=The original file ({0}) and replacement file ({1}) are different file types.
+file.addreplace.error.replace.new_file_same_as_replacement=Error! You may not replace a file with a file that has duplicate content.
+file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it.
+file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file.
+file.addreplace.error.initial_file_list_empty=An error occurred and the new file was not added.
+file.addreplace.error.initial_file_list_more_than_one=You cannot replace a single file with multiple files. The file you uploaded was ingested into multiple files.
+file.addreplace.error.final_file_list_empty=There are no files to add. (This error should not happen if steps are called in sequence.)
+file.addreplace.error.only_replace_operation=This should only be called for file replace operations!
+file.addreplace.error.failed_to_remove_old_file_from_dataset=Unable to remove old file from new DatasetVersion.
+file.addreplace.error.add.add_file_error=Failed to add file to dataset.
+file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset - no new files found.
+file.addreplace.success.add=File successfully added!
+file.addreplace.success.replace=File successfully replaced!
+file.addreplace.error.auth=The API key is invalid.
+file.addreplace.error.invalid_datafile_tag=Not a valid Tabular Data Tag:
+
+
+
+# 500.xhtml
+error.500.page.title=500 Internal Server Error
+error.500.message=Internal Server Error - An unexpected error was encountered, no more information is available.
+
+# 404.xhtml
+error.404.page.title=404 Not Found
+error.404.message=Page Not Found - The page you are looking for was not found.
+
+# 403.xhtml
+error.403.page.title=403 Not Authorized
+error.403.message=Not Authorized - You are not authorized to view this page.
+
+# general error - support message
+error.support.message= If you believe this is an error, please contact {0} for assistance.
+
+# citation-frame.xhtml
+citationFrame.banner.message=If the site below does not load, the archived data can be found in the {0} {1}. {2}
+citationFrame.banner.message.here=here
+citationFrame.banner.closeIcon=Close this message, go to dataset
+citationFrame.banner.countdownMessage= This message will close in
+citationFrame.banner.countdownMessage.seconds=seconds
+
+# Friendly AuthenticationProvider names
+authenticationProvider.name.builtin=Dataverse
+authenticationProvider.name.null=(provider is unknown)
+authenticationProvider.name.github=GitHub
+authenticationProvider.name.google=Google
+authenticationProvider.name.orcid=ORCiD
+authenticationProvider.name.orcid-sandbox=ORCiD Sandbox
+authenticationProvider.name.shib=Shibboleth
+
+#file-edit-popup-fragment.xhtml #editFilesFragment.xhtml
+dataset.access.accessHeader=Restrict Files and Add Dataset Terms of Access
+dataset.access.description=Restricting limits access to published files. You can add or edit Terms of Access for the dataset, and allow people to Request Access to restricted files.
+
+#datasetFieldForEditFragment.xhtml
+dataset.AddReplication=Add "Replication Data for" to Title
+dataset.replicationDataFor=Replication Data for:
+
+
+#mydata_fragment.xhtml
+mydataFragment.infoAccess=Here are all the dataverses, datasets, and files you have access to. You can filter through them by publication status and roles.
+mydataFragment.moreResults=View More Results
+mydataFragment.publicationStatus=Publication Status
+mydataFragment.roles=Roles
+mydataFragment.resultsByUserName=Results by Username
+mydataFragment.search=Search my data...
+mydata.result=Result
+mydata.results=Results
+mydata.viewnext=View Next
+mydata.more=More
+
+file.provenance=Provenance
+file.editProvenanceDialog=Provenance
+file.editProvenanceDialog.tip=Provenance is a record of the origin of your data file and any transformations it has been through. Upload a JSON file from a provenance capture tool to generate a graph of your data''s provenance. For more information, please refer to our User Guide.
+file.editProvenanceDialog.uploadSuccess=Upload complete
+file.editProvenanceDialog.uploadError=An error occurred during upload and parsing of your provenance file.
+file.editProvenanceDialog.noEntitiesError=The uploaded provenance file does not contain any entities that can be related to your Data File.
+file.editProvenanceDialog.invalidSchemaError=The uploaded provenance file does not comply with the W3C Provenance standard.
+file.editProvenanceDialog.bundleFile=Provenance File
+file.editProvenanceDialog.bundleFile.instructions=File must be in JSON format and follow the W3C standard.
+file.editProvenanceDialog.bundleFile.alreadyPublished=This Provenance File has been published and cannot be replaced or removed.
+file.editProvenanceDialog.bundleEntity=Data File Entity
+file.editProvenanceDialog.bundleEntity.placeholder=Connect entity...
+file.editProvenanceDialog.bundleEntity.requiredValidation=Value is required.
+file.editProvenanceDialog.bundleEntity.tip=Select the entity in your provenance file which represents your data file.
+file.editProvenanceDialog.bundleEntity.nameHeader=Name
+file.editProvenanceDialog.bundleEntity.typeHeader=Type
+file.editProvenanceDialog.bundleEntity.entityHeader=Entity
+file.editProvenanceDialog.selectToAddBtn=Select File
+file.editProvenanceDialog.description.tip=You may also add information documenting the history of your data file, including how it was created, how it has changed, and who has worked with it.
+file.editProvenanceDialog.description=Provenance Description
+file.editProvenanceDialog.description.placeholder=Add provenance description...
+file.confirmProvenanceDialog=Provenance
+file.confirmProvenanceDialog.tip1=Once you publish this dataset, your provenance file cannot be edited or replaced.
+file.confirmProvenanceDialog.tip2=Select "Cancel" to return to the previous page, where you can preview your provenance file to confirm it is correct.
+file.metadataTab.provenance.header=File Provenance
+file.metadataTab.provenance.body=File Provenance information coming in a later release...
+file.metadataTab.provenance.error=Due to an internal error, your provenance information was not correctly saved.
+file.metadataTab.provenance.message=Your provenance information has been received. Please click Save Changes below to ensure all data is added to your dataset.
+
+file.provConfirm.unpublished.json=Your Provenance File will become permanent upon publishing your dataset. Please preview to confirm before publishing.
+file.provConfirm.published.json=Your Provenance File will become permanent once you click Save Changes. Please preview to confirm before you Save Changes.
+file.provConfirm.freeform=Your Provenance Description is not permanent; it can be updated at any time.
+file.provConfirm.empty=No changes have been made.
+
+file.provAlert.published.json=Your Provenance File changes have been saved to the Dataset.
+file.provAlert.unpublished.json=Your Provenance File changes will be saved to this version of the Dataset once you click on the Save Changes button.
+file.provAlert.freeform=Your Provenance Description changes will be saved to this version of the Dataset once you click on the Save Changes button.
+file.provAlert.filePage.published.json=Your Provenance File changes have been saved to the Dataset.
+file.provAlert.filePage.unpublished.json=Your Provenance File changes have been saved to this version of the Dataset.
+file.provAlert.filePage.freeform=Your Provenance Description changes have been saved to this version of the Dataset.
+
+api.prov.provJsonSaved=PROV-JSON provenance data saved for Data File:
+api.prov.provJsonDeleted=PROV-JSON deleted for the selected Data File.
+
+api.prov.error.provDisabled=This functionality has been administratively disabled.
+api.prov.error.badDataFileId=Invalid DataFile ID.
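+# Note: the provenance upload described above expects the W3C PROV-JSON
+# serialization, in which entities are declared under a top-level "entity"
+# member; the "Data File Entity" selector lists those entity names. A minimal,
+# illustrative sketch of such a file (the "ex" prefix and file name are
+# hypothetical):
+#
+#   {
+#     "prefix": { "ex": "http://example.org/" },
+#     "entity": { "ex:mydata.tab": { "prov:label": "mydata.tab" } }
+#   }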
+api.prov.error.jsonUpdateNotAllowed=PROV-JSON cannot be updated for a published file that already has PROV-JSON. +api.prov.error.entityMismatch=Entity name provided does not match any entities parsed from the uploaded PROV-JSON. +api.prov.error.jsonDeleteNotAllowed=PROV-JSON cannot be deleted for a published file. +api.prov.error.jsonNoContent=No provenance json available for this file. +api.prov.error.freeformInvalidJson=A valid JSON object could not be found. +api.prov.error.freeformMissingJsonKey=The JSON object you send must have a key called 'text'. +api.prov.error.freeformNoText=No provenance free form text available for this file. +api.prov.error.noDataFileFound=Could not find a file based on ID. + +bagit.sourceOrganization=Dataverse Installation () +bagit.sourceOrganizationAddress= +bagit.sourceOrganizationEmail= + +#Permission.java +permission.addDataverseDataverse=Add a dataverse within another dataverse +permission.deleteDataset=Delete a dataset draft +permission.deleteDataverse=Delete an unpublished dataverse +permission.publishDataset=Publish a dataset +permission.publishDataverse=Publish a dataverse +permission.managePermissionsDataset=Manage permissions for a dataset +permission.managePermissionsDataverse=Manage permissions for a dataverse +permission.editDataset=Edit a dataset's metadata +permission.editDataverse=Edit a dataverse's metadata, facets, customization, and templates +permission.downloadFile=Download a file +permission.viewUnpublishedDataset=View an unpublished dataset and its files +permission.viewUnpublishedDataverse=View an unpublished dataverse +permission.addDatasetDataverse=Add a dataset to a dataverse + +#DataverseUserPage.java +userPage.informationUpdated=Your account information has been successfully updated. +userPage.passwordChanged=Your account password has been successfully changed. +confirmEmail.changed=Your email address has changed and must be re-verified. Please check your inbox at {0} and follow the link we''ve sent. \n\nAlso, please note that the link will only work for the next {1} before it has expired. + +#Dataset.java +dataset.category.documentation=Documentation +dataset.category.data=Data +dataset.category.code=Code + +#DatasetVersionDifference.java +dataset.version.file.added=Files (Added: {0} +dataset.version.file.removed=Files (Removed: {0} +dataset.version.file.removed2=; Removed: {0} +dataset.version.file.replaced=Files (Replaced: {0} +dataset.version.file.replaced2=; Replaced: {0} +dataset.version.file.changed=Files (Changed File Metadata: {0} +dataset.version.file.changed2=; Changed File Metadata: {0} +dataset.version.variablemetadata.changed=Variable Metadata (Changed Variable Metadata: {0} +dataset.version.variablemetadata.changed2=; Changed Variable Metadata: {0} + +#DataversePage.java +dataverse.item.required=Required +dataverse.item.required.conditional=Conditionally Required +dataverse.item.optional=Optional +dataverse.item.hidden=Hidden +dataverse.edit.msg=Edit Dataverse +dataverse.edit.detailmsg=Edit your dataverse and click Save Changes. Asterisks indicate required fields. +dataverse.feature.update=The featured dataverses for this dataverse have been updated. +dataverse.link.select=You must select a linking dataverse. +dataset.noSelectedDataverse.header=Select Dataverse(s) +dataverse.link.user=Only authenticated users can link a dataverse. +dataverse.link.error=Unable to link {0} to {1}. An internal error occurred. +dataverse.search.user=Only authenticated users can save a search. 
+dataverse.alias=alias
+dataverse.alias.taken=This Alias is already taken.
+
+#editDatafilesPage.java
+dataset.save.fail=Dataset Save Failed
+
+dataset.files.exist=Files {0} have the same content as {1} that already exists in the dataset.
+dataset.file.exist=File {0} has the same content as {1} that already exists in the dataset.
+dataset.file.exist.test={0, choice, 1#File |2#Files |} {1} {0, choice, 1#has |2#have |} the same content as {2} that already {0, choice, 1#exist |2#exist |}in the dataset.
+dataset.files.duplicate=Files {0} have the same content as {1} that have already been uploaded.
+dataset.file.duplicate=File {0} has the same content as {1} that has already been uploaded.
+dataset.file.inline.message= This file has the same content as {0}.
+dataset.file.upload={0} was uploaded successfully.
+dataset.file.upload.setUp.rsync.failed=Rsync upload setup failed!
+dataset.file.upload.setUp.rsync.failed.detail=Unable to find appropriate storage driver.
+dataset.file.uploadFailure=upload failure
+dataset.file.uploadFailure.detailmsg=The file {0} failed to upload!
+dataset.file.uploadWarning=upload warning
+dataset.file.uploadWorked=upload worked
+dataset.file.upload.popup.explanation.tip=For more information, please refer to the Duplicate Files section of the User Guide.
+
+#EmailValidator.java
+email.invalid=is not a valid email address.
+
+#URLValidator.java
+url.invalid=is not a valid URL.
+
+#HarvestingClientsPage.java
+harvest.start.error=Sorry, harvest could not be started for the selected harvesting client configuration (unknown server error).
+harvest.delete.error=Selected harvesting client cannot be deleted; unknown exception:
+harvest.create.error=Failed to create a new Harvesting Client configuration: no destination dataverse selected.
+harvest.createCommand.error=Harvesting client creation command failed
+harvest.create.fail=Harvesting client creation failed (reason unknown).
+harvest.update.success=Successfully updated harvesting client
+harvest.save.failure1=Failed to save harvesting client
+harvest.save.failure2=Failed to save harvesting client (reason unknown).
+
+#HarvestingSetsPage.java
+harvest.oaicreate.fail=Failed to create OAI set
+harvest.oaicreate.defaultset.fail=Failed to create the default OAI set
+harvest.oaiupdate.fail=Failed to update OAI set.
+harvest.oaiupdate.success=Successfully updated OAI set "{0}".
+harvest.delete.fail=Failed to delete harvesting set; unknown exception:
+harvest.reexport.fail=Sorry, could not start re-export on selected OAI set (unknown server error).
+harvest.search.failed=Search failed for the query provided. Message from the Dataverse search server:
+
+#LoginPage.java
+login.Username/Email=Please enter a Username
+login.Password=Please enter a Password
+
+#SystemConfig.java
+system.app.terms=There are no Terms of Use for this Dataverse installation.
+system.api.terms=There are no API Terms of Use for this Dataverse installation.
+
+#DatasetPage.java
+dataverse.notreleased=DataverseNotReleased
+dataverse.release.authenticatedUsersOnly=Only authenticated users can release a dataverse.
+dataset.registration.failed=Dataset Registration Failed
+dataset.registered=DatasetRegistered
+dataset.registered.msg=Your dataset is now registered.
+dataset.notlinked=DatasetNotLinked
+dataset.notlinked.msg=There was a problem linking this dataset to yours:
+datasetversion.archive.success=Archival copy of Version successfully submitted
+datasetversion.archive.failure=Error in submitting an archival copy
+datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version.
+datasetversion.update.archive.failure=Dataset Version Update succeeded, but the attempt to update the archival copy failed.
+datasetversion.update.success=The published version of your Dataset has been updated.
+datasetversion.update.archive.success=The published version of your Dataset, and its archival copy, have been updated.
+
+#ThemeWidgetFragment.java
+theme.validateTagline=Tagline must be at most 140 characters.
+theme.urlValidate=URL validation failed.
+theme.urlValidate.msg=Please provide a URL.
+dataverse.save.failed=Dataverse Save Failed -
+
+#LinkValidator.java
+link.tagline.validate=Please enter a tagline for the website to be hyperlinked with.
+
+#TemplatePage.java
+template.save.fail=Template Save Failed
+template.create=Template has been created.
+template.save=Template has been edited and saved.
+
+#GuestbookPage.java
+guestbook.save.fail=Guestbook Save Failed
+guestbook.option.msg= - An Option question requires multiple options. Please complete before saving.
+guestbook.create=The guestbook has been created.
+guestbook.save=The guestbook has been edited and saved.
+
+#Shib.java
+shib.invalidEmailAddress=The SAML assertion contained an invalid email address: "{0}".
+shib.emailAddress.error=A single valid address could not be found.
+shib.nullerror=The SAML assertion for "{0}" was null. Please contact support.
+dataverse.shib.success=Your Dataverse account is now associated with your institutional account.
+shib.convert.fail.deactivated=Your existing account cannot be converted because it has been deactivated.
+shib.createUser.fail=Couldn't create user.
+shib.duplicate.email.error=Cannot log in, because the e-mail address associated with it has changed since the previous login and is already in use by another account.
+
+#IngestServiceBean.java
+ingest.failed=ingest failed
+
+#ManagePermissionsPage.java
+permission.roleWasRemoved={0} role for {1} was removed.
+permission.defaultPermissionDataverseUpdated=The default permissions for this dataverse have been updated.
+permission.roleAssignedToFor={0} role assigned to {1} for {2}.
+permission.roleNotAssignedFor={0} role could NOT be assigned to {1} for {2}.
+permission.updated=updated
+permission.created=created
+permission.roleWas=The role was {0}. To assign it to a user and/or group, click on the Assign Roles to Users/Groups button in the Users/Groups section of this page.
+permission.roleNotSaved=The role was not able to be saved.
+permission.permissionsMissing=Permissions {0} missing.
+permission.CannotAssigntDefaultPermissions=Cannot assign default permissions.
+permission.default.contributor.role.none.decription=A person who has no permissions on a newly created dataset. Not recommended for dataverses with human contributors.
+permission.default.contributor.role.none.name=None
+permission.role.must.be.created.by.superuser=Roles can only be created or edited by superusers.
+permission.role.not.created.alias.already.exists=Role with this alias already exists.
+
+#ManageFilePermissionsPage.java
+permission.roleNotAbleToBeRemoved=The role assignment was not able to be removed.
+permission.fileAccessGranted=File Access request by {0} was granted.
+permission.fileAccessRejected=File Access request by {0} was rejected.
+permission.roleNotAbleToBeAssigned=The role was not able to be assigned.
+
+#ManageGroupsPage.java
+dataverse.manageGroups.create.success=Successfully created group {0}. Refresh to update your page.
+dataverse.manageGroups.save.success=Successfully saved group {0}.
+dataverse.manageGroups.delete=The group has been deleted.
+dataverse.manageGroups.nodelete=The explicit group cannot be deleted.
+dataverse.manageGroups.create.fail=Group Creation failed.
+dataverse.manageGroups.edit.fail=Group edit failed.
+dataverse.manageGroups.save.fail=Group Save failed.
+
+#ManageTemplatesPage.java
+template.makeDefault=The template has been selected as the default template for this dataverse
+template.unselectDefault=The template has been removed as the default template for this dataverse
+template.clone=The template has been copied
+template.clone.error=Template could not be copied.
+template.delete=The template has been deleted
+template.delete.error=The dataset template cannot be deleted.
+template.update=Template data updated
+template.update.error=Template update failed
+template.makeDefault.error=The dataset template cannot be made default.
+page.copy=Copy of
+
+#RolePermissionFragment.java
+permission.roleAssignedToOn=Role {0} assigned to {1} on {2}
+permission.cannotAssignRole=Can''t assign role: {0}
+permission.roleRevoked=Role assignment revoked successfully
+permission.cannotRevokeRole1=Cannot revoke role assignment - you''re missing permission {0}
+permission.cannotRevokeRole2=Cannot revoke role assignment: {0}
+permission.roleSave=Role "{0}" saved
+permission.cannotSaveRole=Cannot save role {0}
+
+#GlobalId.java
+pid.allowedCharacters=^[A-Za-z0-9._/:\\-]*
+
+#General Command Exception
+command.exception.only.superusers={1} can only be called by superusers.
+command.exception.user.deactivated={0} failed: User account has been deactivated.
+command.exception.user.deleted={0} failed: User account has been deleted.
+
+#Admin-API
+admin.api.auth.mustBeSuperUser=Forbidden. You must be a superuser.
+admin.api.migrateHDL.failure.must.be.set.for.doi=May not migrate while installation protocol set to "hdl". Protocol must be "doi".
+admin.api.migrateHDL.failure.must.be.hdl.dataset=Dataset was not registered as an HDL. It cannot be migrated.
+admin.api.migrateHDL.success=Dataset migrate HDL registration complete. Dataset re-registered successfully.
+admin.api.migrateHDL.failure=Failed to migrate Dataset Handle id: {0}
+admin.api.migrateHDL.failureWithException=Failed to migrate Dataset Handle id: {0} Unexpected exception: {1}
+admin.api.deleteUser.failure.prefix=Could not delete Authenticated User {0} because
+admin.api.deleteUser.failure.dvobjects= the user has created Dataverse object(s)
+admin.api.deleteUser.failure.gbResps= the user is associated with file download (Guestbook Response) record(s)
+admin.api.deleteUser.failure.roleAssignments=the user is associated with role assignment record(s)
+admin.api.deleteUser.failure.versionUser=the user has contributed to dataset version(s)
+admin.api.deleteUser.failure.savedSearches=the user has created saved searches
+admin.api.deleteUser.success=Authenticated User {0} deleted.
+
+#Files.java
+files.api.metadata.update.duplicateFile=Filename already exists at {0}
+
+#Datasets.java
+datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset.
+datasets.api.updatePIDMetadata.auth.mustBeSuperUser=Forbidden. You must be a superuser.
+datasets.api.updatePIDMetadata.success.for.single.dataset=Dataset {0} PID Metadata updated successfully.
+datasets.api.updatePIDMetadata.success.for.update.all=All Dataset PID Metadata update completed successfully.
+datasets.api.moveDataset.error.targetDataverseNotFound=Target dataverse not found.
+datasets.api.moveDataset.error.suggestForce=Use the query parameter forceMove=true to complete the move.
+datasets.api.moveDataset.success=Dataset moved successfully.
+datasets.api.listing.error=Fatal error trying to list the contents of the dataset. Please report this error to the Dataverse administrator.
+datasets.api.datasize.storage=Total size of the files stored in this dataset: {0} bytes
+datasets.api.datasize.download=Total size of the files available for download in this version of the dataset: {0} bytes
+datasets.api.datasize.ioerror=Fatal IO error while trying to determine the total size of the files stored in the dataset. Please report this error to the Dataverse administrator.
+datasets.api.grant.role.not.found.error=Cannot find role named ''{0}'' in dataverse {1}
+datasets.api.grant.role.cant.create.assignment.error=Cannot create assignment: {0}
+datasets.api.grant.role.assignee.not.found.error=Assignee not found
+datasets.api.revoke.role.not.found.error="Role assignment {0} not found"
+datasets.api.revoke.role.success=Role {0} revoked for assignee {1} in {2}
+datasets.api.privateurl.error.datasetnotfound=Could not find dataset.
+datasets.api.privateurl.error.alreadyexists=Private URL already exists for this dataset.
+datasets.api.privateurl.error.notdraft=Can't create Private URL because the latest version of this dataset is not a draft.
+datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymized access because this dataset has been published.
+
+
+#Dataverses.java
+dataverses.api.update.default.contributor.role.failure.role.not.found=Role {0} not found.
+dataverses.api.update.default.contributor.role.success=Default contributor role for Dataverse {0} has been set to {1}.
+dataverses.api.update.default.contributor.role.failure.role.does.not.have.dataset.permissions=Role {0} does not have dataset permissions.
+dataverses.api.move.dataverse.failure.descendent=Can't move a dataverse to its descendant
+dataverses.api.move.dataverse.failure.already.member=Dataverse already in this dataverse
+dataverses.api.move.dataverse.failure.itself=Cannot move a dataverse into itself
+dataverses.api.move.dataverse.failure.not.published=Published dataverse may not be moved to unpublished dataverse. You may publish {1} and re-try the move.
+dataverses.api.move.dataverse.error.guestbook=Dataset guestbook is not in target dataverse.
+dataverses.api.move.dataverse.error.template=Dataverse template is not in target dataverse.
+dataverses.api.move.dataverse.error.featured=Dataverse is featured in current dataverse.
+dataverses.api.move.dataverse.error.metadataBlock=Dataverse metadata block is not in target dataverse.
+dataverses.api.move.dataverse.error.dataverseLink=Dataverse is linked to target dataverse or one of its parents.
+dataverses.api.move.dataverse.error.datasetLink=Dataset is linked to target dataverse or one of its parents.
+dataverses.api.move.dataverse.error.forceMove=Please use the parameter ?forceMove=true to complete the move. This will remove anything from the dataverse that is not compatible with the target dataverse.
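+# Note: the doubled apostrophes above (e.g. in ''{0}'') are java.text.MessageFormat
+# escapes, not typos: within a pattern a single quote starts a quoted section, so
+# a literal apostrophe is written as ''. A minimal, illustrative sketch (the
+# argument values are hypothetical):
+#
+#   import java.text.MessageFormat;
+#   MessageFormat.format("Cannot find role named ''{0}'' in dataverse {1}",
+#           "curator", "root");
+#   // -> Cannot find role named 'curator' in dataverse root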
+
+#Access.java
+access.api.allowRequests.failure.noDataset=Could not find Dataset with id: {0}
+access.api.allowRequests.failure.noSave=Problem saving dataset {0}: {1}
+access.api.allowRequests.allows=allows
+access.api.allowRequests.disallows=disallows
+access.api.allowRequests.success=Dataset {0} {1} file access requests.
+access.api.fileAccess.failure.noUser=Could not find user to execute command: {0}
+access.api.requestAccess.failure.commandError=Problem trying to request access on {0}: {1}
+access.api.requestAccess.failure.requestExists=An access request for this file on your behalf already exists.
+access.api.requestAccess.failure.invalidRequest=You may not request access to this file. It may already be available to you.
+access.api.requestAccess.noKey=You must provide a key to request access to a file.
+access.api.requestAccess.fileNotFound=Could not find datafile with id {0}.
+access.api.requestAccess.invalidRequest=This file is already available to you for download, or you have a pending request.
+access.api.requestAccess.requestsNotAccepted=Requests for access are not accepted on the Dataset.
+access.api.requestAccess.success.for.single.file=Access to File {0} requested.
+access.api.rejectAccess.failure.noPermissions=Requestor does not have permission to manage file download requests.
+access.api.grantAccess.success.for.single.file=Access to File {0} granted.
+access.api.grantAccess.noAssigneeFound=Could not find assignee with identifier {0}.
+access.api.grantAccess.failure.commandError=Problem trying to grant access on {0}: {1}
+access.api.fileAccess.rejectFailure.noRequest=No request for access to file {0} for user {1}
+access.api.rejectAccess.success.for.single.file=Access to File {0} rejected.
+access.api.revokeAccess.noRoleFound=No File Downloader role found for user {0}
+access.api.revokeAccess.success.for.single.file=File Downloader access has been revoked for user {0} on file {1}
+access.api.requestList.fileNotFound=Could not find datafile with id {0}.
+access.api.requestList.noKey=You must provide a key to get a list of access requests for a file.
+access.api.requestList.noRequestsFound=There are no access requests for this file {0}.
+access.api.exception.metadata.not.available.for.nontabular.file=This type of metadata is only available for tabular files.
+access.api.exception.metadata.restricted.no.permission=You do not have permission to download this file.
+access.api.exception.version.not.found=Could not find requested dataset version.
+access.api.exception.dataset.not.found=Could not find requested dataset.
+ +#permission +permission.AddDataverse.label=AddDataverse +permission.AddDataset.label=AddDataset +permission.ViewUnpublishedDataverse.label=ViewUnpublishedDataverse +permission.ViewUnpublishedDataset.label=ViewUnpublishedDataset +permission.DownloadFile.label=DownloadFile +permission.EditDataverse.label=EditDataverse +permission.EditDataset.label=EditDataset +permission.ManageDataversePermissions.label=ManageDataversePermissions +permission.ManageDatasetPermissions.label=ManageDatasetPermissions +permission.PublishDataverse.label=PublishDataverse +permission.PublishDataset.label=PublishDataset +permission.DeleteDataverse.label=DeleteDataverse +permission.DeleteDatasetDraft.label=DeleteDatasetDraft + +permission.AddDataverse.desc=Add a dataverse within another dataverse +permission.DeleteDatasetDraft.desc=Delete a dataset draft +permission.DeleteDataverse.desc=Delete an unpublished dataverse +permission.PublishDataset.desc=Publish a dataset +permission.PublishDataverse.desc=Publish a dataverse +permission.ManageDatasetPermissions.desc=Manage permissions for a dataset +permission.ManageDataversePermissions.desc=Manage permissions for a dataverse +permission.EditDataset.desc=Edit a dataset's metadata +permission.EditDataverse.desc=Edit a dataverse's metadata, facets, customization, and templates +permission.DownloadFile.desc=Download a file +permission.ViewUnpublishedDataset.desc=View an unpublished dataset and its files +permission.ViewUnpublishedDataverse.desc=View an unpublished dataverse +permission.AddDataset.desc=Add a dataset to a dataverse + +packageDownload.title=Package File Download +packageDownload.instructions=Use the Download URL in a Wget command or a download manager to download this package file. Download via web browser is not recommended. User Guide - Downloading a Dataverse Package via URL +packageDownload.urlHeader=Download URL + +#mydata_fragment.xhtml +Published=Published +Unpublished=Unpublished +Draft=Draft +In\u0020Review=In Review +Deaccessioned=Deaccessioned + +#Managegroupspage.java +dataverse.manageGroups.user=user +dataverse.manageGroups.users=users +dataverse.manageGroups.group=group +dataverse.manageGroups.groups=groups +dataverse.manageGroups.nomembers=No Members +dataverse.manageGroups.unknown=unknown +dataverse.manageGroups.User=User +dataverse.manageGroups.Group=Group + +#editFilesFragment.xhtml +editfilesfragment.mainlabel=Select Language Encoding... 
+editfilesfragment.label1=West European
+editfilesfragment.label1.item1=Western (ISO-8859-1)
+editfilesfragment.label1.item2=Western (ISO-8859-15)
+editfilesfragment.label1.item3=Western (Windows-1252)
+editfilesfragment.label1.item4=Western (MacRoman)
+editfilesfragment.label1.item5=Western (IBM-850)
+editfilesfragment.label1.item6=Celtic (ISO-8859-14)
+editfilesfragment.label1.item7=Greek (ISO-8859-7)
+editfilesfragment.label1.item8=Greek (Windows-1253)
+editfilesfragment.label1.item9=Greek (MacGreek)
+editfilesfragment.label1.item10=Icelandic (MacIcelandic)
+editfilesfragment.label1.item11=Nordic (ISO-8859-10)
+editfilesfragment.label1.item12=South European (ISO-8859-3)
+editfilesfragment.label2=East European
+editfilesfragment.label2.item1=Baltic (ISO-8859-4)
+editfilesfragment.label2.item2=Baltic (ISO-8859-13)
+editfilesfragment.label2.item3=Baltic (Windows-1257)
+editfilesfragment.label2.item4=Cyrillic (ISO-8859-5)
+editfilesfragment.label2.item5=Cyrillic (ISO-IR-111)
+editfilesfragment.label2.item6=Cyrillic (Windows-1251)
+editfilesfragment.label2.item7=Cyrillic (MacCyrillic)
+editfilesfragment.label2.item8=Cyrillic/Ukrainian (MacUkrainian)
+editfilesfragment.label2.item9=Cyrillic (KOI8-R)
+editfilesfragment.label2.item10=Cyrillic/Ukrainian (KOI8-U)
+editfilesfragment.label2.item11=Croatian (MacCroatian)
+editfilesfragment.label2.item12=Romanian (MacRomanian)
+editfilesfragment.label2.item13=Romanian (ISO-8859-16)
+editfilesfragment.label2.item14=Central European (ISO-8859-2)
+editfilesfragment.label2.item15=Central European (Windows-1250)
+editfilesfragment.label2.item16=Central European (MacCE)
+editfilesfragment.label2.item17=Cyrillic (IBM-855)
+editfilesfragment.label3=East Asian
+editfilesfragment.label3.item1=Japanese (ISO-2022-JP)
+editfilesfragment.label3.item2=Japanese (Shift_JIS)
+editfilesfragment.label3.item3=Japanese (EUC-JP)
+editfilesfragment.label3.item4=Chinese Traditional (Big5)
+editfilesfragment.label3.item5=Chinese Traditional (Big5-HKSCS)
+editfilesfragment.label3.item6=Chinese Traditional (EUC-TW)
+editfilesfragment.label3.item7=Chinese Simplified (GB2312)
+editfilesfragment.label3.item8=Chinese Simplified (HZ)
+editfilesfragment.label3.item9=Chinese Simplified (GBK)
+editfilesfragment.label3.item10=Chinese Simplified (ISO-2022-CN)
+editfilesfragment.label3.item11=Korean (EUC-KR)
+editfilesfragment.label3.item12=Korean (JOHAB)
+editfilesfragment.label3.item13=Korean (ISO-2022-KR)
+editfilesfragment.label4=Unicode
+editfilesfragment.label4.item1=Unicode (UTF-8)
+editfilesfragment.label4.item2=Unicode (UTF-16LE)
+editfilesfragment.label4.item3=Unicode (UTF-16BE)
+editfilesfragment.label5=US-ASCII
+
+isrequired={0} is required.
+isrequired.conditional={0} is required if you choose to enter a value in any of the optional {1} fields.
+draftversion=DRAFT VERSION
+deaccessionedversion=DEACCESSIONED VERSION
+
+not_restricted=Not Restricted
+editdatafilepage.defaultLanguageEncoding=UTF8 (default)
+passwdVal.passwdReq.each=each
+passwdVal.passwdReq.uppercase=uppercase
+passwdVal.passwdReq.lowercase=lowercase
+passwdVal.passwdReq.letter=letter
+passwdVal.passwdReq.numeral=numeral
+passwdVal.passwdReq.special=special
+dataretrieverAPI.noMsgResultsFound=Sorry, no results were found.
+
+#xlsxfilereader.java
+xlsxfilereader.ioexception.parse=Could not parse Excel/XLSX spreadsheet. {0}
+xlsxfilereader.ioexception.norows=No rows of data found in the Excel (XLSX) file.
+xlsxfilereader.ioexception.onlyonerow=Only one row of data (column name header?) detected in the Excel (XLSX) file.
+xlsxfilereader.ioexception.failed=Failed to read line {0} during the second pass.
+xlsxfilereader.ioexception.mismatch=Reading mismatch, line {0} during the second pass: {1} delimited values expected, {2} found.
+xlsxfilereader.ioexception.linecount=Mismatch between line counts in first and final passes!
+
+#rtabfileparser.java
+rtabfileparser.ioexception.failed=Failed to read line {0} of the Data file.
+rtabfileparser.ioexception.mismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found.
+rtabfileparser.ioexception.boolean=Unexpected value for the Boolean variable ({0}):
+rtabfileparser.ioexception.read=Couldn't read Boolean variable ({0})!
+rtabfileparser.ioexception.parser1=R Tab File Parser: Could not obtain varQnty from the dataset metadata.
+rtabfileparser.ioexception.parser2=R Tab File Parser: varQnty=0 in the dataset metadata!
+
+#ConfigureFragmentBean.java
+configurefragmentbean.apiTokenGenerated=API Token will be generated. Please keep it secure as you would do with a password.
+
+#FacetCategory - staticSearchFields
+staticSearchFields.dvCategory=Dataverse Category
+staticSearchFields.metadataSource=Metadata Source
+staticSearchFields.publicationDate=Publication Year
+staticSearchFields.fileTypeGroupFacet=File Type
+staticSearchFields.dvObjectType=Type
+staticSearchFields.fileTag=File Tag
+staticSearchFields.fileAccess=Access
+staticSearchFields.publicationStatus=Publication Status
+staticSearchFields.subject_ss=Subject
+
+#dataverse category - Facet Labels
+Researcher=Researcher
+Research\u0020Project=Research Project
+Journal=Journal
+Organization\u0020or\u0020Institution=Organization or Institution
+Teaching\u0020Course=Teaching Course
+Research\u0020Group=Research Group
+Laboratory=Laboratory
+Department=Department
+Uncategorized=Uncategorized
+
+#filetype - Facet Labels
+Document=Document
+Text=Text
+Tabular\u0020Data=Tabular Data
+Data=Data
+FITS=FITS
+Shape=Shape
+Image=Image
+Network\u0020Data=Network Data
+Unknown=Unknown
+Documentation=Documentation
+Code=Code
+Archive=Archive
+Audio=Audio
+Video=Video
+
+#access - Facet Labels
+Public=Public
+Restricted=Restricted
+
+
+#Shibboleth login
+idp.fatal.divMissing=<div> specified as "insertAtDiv" could not be located in the HTML
+idp.fatal.noXMLHttpRequest=Browser does not support XMLHttpRequest, unable to load IdP selection data
+idp.fatal.wrongProtocol=Policy supplied to DS was not "urn:oasis:names:tc:SAML:profiles:SSO:idpdiscovery-protocol:single"
+idp.fatal.wrongEntityId=entityId supplied by SP did not match configuration
+idp.fatal.noData=Metadata download returned no data
+idp.fatal.loadFailed=Failed to download metadata from
+idp.fatal.noparms=No parameters to discovery session and no defaultReturn parameter configured
+idp.fatal.noReturnURL=No URL return parameter provided
+idp.fatal.badProtocol=Return request must start with https:// or http://
+idp.idpPreferred.label=Use a previous selection:
+idp.idpEntry.label=Or enter your institution's name.
+idp.idpEntry.NoPreferred.label=Enter your institution's name and click "Continue" to log in via your institution's authentication system.
+idp.idpList.label=Or select your institution from the list below.
+idp.idpList.NoPreferred.label=Select your institution and click "Continue" to log in via your institution's authentication system.
+idp.idpList.defaultOptionLabel=Please select...
+idp.idpList.showList=Allow me to pick from a list
+idp.idpList.showSearch=Allow me to type the name of my institution
+idp.submitButton.label=Continue
+idp.helpText=Help
+idp.defaultLogoAlt=
+
+#externaltools
+externaltools.dct.displayname=Data Curation Tool
+externaltools.dct.description=Data Curation Tool for curation of variables
+externaltools.explorer.displayname=Data Explorer
+externaltools.explorer.description=The Data Explorer provides a GUI which lists the variables in a tabular data file, allowing searching, charting, and cross-tabulation analysis.
+
+# api/admin/datasetfield/load
+api.admin.datasetfield.load.ArrayIndexOutOfBoundMessage=Error parsing metadata block in {0} part, line #{1}: missing ''{2}'' column (#{3})
+api.admin.datasetfield.load.GeneralErrorMessage=Error parsing metadata block in {0} part, line #{1}: {2}
+
+#PIDs
+pids.api.reservePid.success=PID reserved for {0}
+pids.api.deletePid.success=PID deleted for {0}
+pids.deletePid.failureExpected=Unable to delete PID {0}. Status code: {1}.
+pids.deletePid.failureOther=Problem deleting PID {0}: {1}
+pids.commands.reservePid.failure=Problem reserving PID for dataset id {0}: {1}.
+pids.datacite.errors.noResponseCode=Problem getting HTTP status code from {0}. Is it in DNS? Is doi.dataciterestapiurlstring configured properly?
+pids.datacite.errors.DoiOnly=Only doi: is supported.
+
+#PublishDatasetCommand
+publishDatasetCommand.pidNotReserved=Cannot publish dataset because its persistent identifier has not been reserved.
+
+# APIs
+api.errors.invalidApiToken=Invalid API token.
diff --git a/distros/dataverse.no/modification/analytics.xhtml b/distros/dataverse.no/modification/analytics.xhtml
new file mode 100644
index 0000000..a42753d
--- /dev/null
+++ b/distros/dataverse.no/modification/analytics.xhtml
@@ -0,0 +1,23 @@
diff --git a/distros/dataverse.no/modification/dataverse_footer.xhtml b/distros/dataverse.no/modification/dataverse_footer.xhtml
new file mode 100644
index 0000000..01be3ed
--- /dev/null
+++ b/distros/dataverse.no/modification/dataverse_footer.xhtml
@@ -0,0 +1,92 @@
+ [dataverse_footer.xhtml markup (92 added lines) stripped in text extraction]
diff --git a/distros/dataverse.no/modification/dataverse_header.xhtml b/distros/dataverse.no/modification/dataverse_header.xhtml
new file mode 100644
index 0000000..775c0bf
--- /dev/null
+++ b/distros/dataverse.no/modification/dataverse_header.xhtml
@@ -0,0 +1,407 @@
+ [dataverse_header.xhtml markup (407 added lines) stripped in text extraction]
diff --git a/distros/dataverse.no/modification/loginpage.xhtml b/distros/dataverse.no/modification/loginpage.xhtml
new file mode 100644
index 0000000..be00efa
--- /dev/null
+++ b/distros/dataverse.no/modification/loginpage.xhtml
@@ -0,0 +1,238 @@
+ [loginpage.xhtml markup (238 added lines, including the #{bundle['auth.providers.title']} login-providers block) stripped in text extraction]
+ From 528089e0732dcabbf8a35b46c528ec5e2c7eb950 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 12 Jul 2022 09:17:16 +0000 Subject: [PATCH 015/354] analytics initialisation --- distros/dataverse.no/init.d/100-analytics.sh | 1 + 1 file changed, 1 insertion(+) create mode 100644 distros/dataverse.no/init.d/100-analytics.sh diff --git a/distros/dataverse.no/init.d/100-analytics.sh b/distros/dataverse.no/init.d/100-analytics.sh new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/distros/dataverse.no/init.d/100-analytics.sh @@ -0,0 +1 @@ + From f197f8b2433c2420ea02e673f3d6359af8e3619a Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 12 Jul 2022 09:30:05 +0000 Subject: [PATCH 016/354] analytics initialization update --- distros/dataverse.no/init.d/100-analytics.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/distros/dataverse.no/init.d/100-analytics.sh b/distros/dataverse.no/init.d/100-analytics.sh index 8b13789..cb1c4ba 100644 --- a/distros/dataverse.no/init.d/100-analytics.sh +++ b/distros/dataverse.no/init.d/100-analytics.sh @@ -1 +1,2 @@ - +curl -o $DOCROOT_DIR/analytics.xhtml https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/analytics.xhtml +curl -X PUT -d $DOCROOT_DIR/analytics.xhtml http://localhost:8080/api/admin/settings/:WebAnalyticsCode From e88d9a08ed79aa4434e4d303fdf410c6f86795c8 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 12 Jul 2022 09:47:04 +0000 Subject: [PATCH 017/354] analytics init update --- distros/dataverse.no/init.d/100-analytics.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/distros/dataverse.no/init.d/100-analytics.sh b/distros/dataverse.no/init.d/100-analytics.sh index cb1c4ba..3c9c536 100644 --- a/distros/dataverse.no/init.d/100-analytics.sh +++ b/distros/dataverse.no/init.d/100-analytics.sh @@ -1,2 +1,6 @@ curl -o $DOCROOT_DIR/analytics.xhtml https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/analytics.xhtml -curl -X PUT -d $DOCROOT_DIR/analytics.xhtml http://localhost:8080/api/admin/settings/:WebAnalyticsCode + +if [ ! -z "$WEBANALYTICSON" ] +then + curl -X PUT -d $DOCROOT_DIR/analytics.xhtml http://localhost:8080/api/admin/settings/:WebAnalyticsCode +fi From 816d157ec799d2591ae854635e35af4de57d29b5 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 12 Jul 2022 10:43:40 +0000 Subject: [PATCH 018/354] Bundle.property update --- .../modification/Bundle.properties | 24 ++----------------- 1 file changed, 2 insertions(+), 22 deletions(-) diff --git a/distros/dataverse.no/modification/Bundle.properties b/distros/dataverse.no/modification/Bundle.properties index 6266510..93ddde7 100644 --- a/distros/dataverse.no/modification/Bundle.properties +++ b/distros/dataverse.no/modification/Bundle.properties @@ -188,10 +188,7 @@ wasReturnedByReviewer=, was returned by the curator of # TODO: Confirm that "toReview" can be deleted. toReview=Don't forget to publish it or send it back to the contributor! # Bundle file editors, please note that "notification.welcome" is used in a unit test. -#notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address. 
-#UB notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check our user guides at http://site.uit.no/dataverseno/deposit/ or contact DataverseNO support for assistance: http://site.uit.no/dataverseno/support/. - notification.demoSite=Demo Site notification.requestFileAccess=File access requested for dataset: {0} was made by {1} ({2}). notification.grantFileAccess=Access granted for files in dataset: {0}. @@ -306,16 +303,8 @@ login.System=Login System login.forgot.text=Forgot your password? login.builtin=Dataverse Account login.institution=Institutional Account -#login.institution.blurb=Log in or sign up with your institutional account — more information about account creation. -#UB login.institution.blurb=Log in or sign up with your institutional account — more information about account creation. - - -#login.institution.support.blurbwithLink=Leaving your institution? Please contact {0} for assistance. -#UB login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance. - - login.builtin.credential.usernameOrEmail=Username/Email login.builtin.credential.password=Password login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account? @@ -715,10 +704,7 @@ notification.email.wasSubmittedForReview={0} (view at {1}) was submitted for rev notification.email.wasReturnedByReviewer={0} (view at {1}) was returned by the curator of {2} (view at {3}). notification.email.wasPublished={0} (view at {1}) was published in {2} (view at {3}). notification.email.publishFailedPidReg={0} (view at {1}) in {2} (view at {3}) could not be published due to a failure to register, or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen. -#notification.email.closing=\n\nYou may contact us for support at {0}.\n\nThank you,\n{1} -#UB notification.email.closing=\n\nYou may contact us for support at https://site.uit.no/dataverseno/support/ .\n\nThank you,\n{1} - notification.email.closing.html=

<br/><br/>You may contact us for support at {0}.<br/><br/>Thank you,<br/>
{1} notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}). notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}). @@ -890,10 +876,7 @@ dataverse.results.btn.addData.newDataverse=New Dataverse dataverse.results.btn.addData.newDataset=New Dataset dataverse.results.dialog.addDataGuest.header=Add Data dataverse.results.dialog.addDataGuest.msg=Log in to create a dataverse or add a dataset. -#dataverse.results.dialog.addDataGuest.msg.signup=Sign up or log in to create a dataverse or add a dataset. -#UB: dataverse.results.dialog.addDataGuest.msg.signup=You need to Log In to add a dataset. - dataverse.results.dialog.addDataGuest.signup.title=Sign Up for a Dataverse Account dataverse.results.dialog.addDataGuest.login.title=Log into your Dataverse Account dataverse.results.types.dataverses=Dataverses @@ -1674,11 +1657,8 @@ file.download.filetype.unknown=Original File Format file.more.information.link=Link to more file information for file.requestAccess=Request Access file.requestAccess.dialog.msg=You need to Log In to request access. -#file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access. -#UB -fietAccess.dialog.msg.signup=You need to Sign Up or Log In to request access. - -.accessRequested=Access Requested +file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access. +file.accessRequested=Access Requested file.ingestInProgress=Ingest in progress... file.dataFilesTab.metadata.header=Metadata file.dataFilesTab.metadata.addBtn=Add + Edit Metadata From 8542da5eb58827b44f5a089dc3165b8b1785115b Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 12 Jul 2022 10:52:24 +0000 Subject: [PATCH 019/354] Bundle.properties.patch --- .../modification/Bundle.properties.patch | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 distros/dataverse.no/modification/Bundle.properties.patch diff --git a/distros/dataverse.no/modification/Bundle.properties.patch b/distros/dataverse.no/modification/Bundle.properties.patch new file mode 100644 index 0000000..1a04e63 --- /dev/null +++ b/distros/dataverse.no/modification/Bundle.properties.patch @@ -0,0 +1,49 @@ +--- Bundle.properties 2021-08-04 19:13:08.000000000 +0000 ++++ /root/git/dataverse-docker/distros/dataverse.no/modification/Bundle.properties 2022-07-12 10:41:34.201813777 +0000 +@@ -188,7 +188,7 @@ + # TODO: Confirm that "toReview" can be deleted. + toReview=Don't forget to publish it or send it back to the contributor! + # Bundle file editors, please note that "notification.welcome" is used in a unit test. +-notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address. ++notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check our user guides at http://site.uit.no/dataverseno/deposit/ or contact DataverseNO support for assistance: http://site.uit.no/dataverseno/support/. + notification.demoSite=Demo Site + notification.requestFileAccess=File access requested for dataset: {0} was made by {1} ({2}). + notification.grantFileAccess=Access granted for files in dataset: {0}. +@@ -303,8 +303,8 @@ + login.forgot.text=Forgot your password? 
+ login.builtin=Dataverse Account + login.institution=Institutional Account +-login.institution.blurb=Log in or sign up with your institutional account — more information about account creation. +-login.institution.support.blurbwithLink=Leaving your institution? Please contact {0} for assistance. ++login.institution.blurb=Log in or sign up with your institutional account — more information about account creation. ++login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance. + login.builtin.credential.usernameOrEmail=Username/Email + login.builtin.credential.password=Password + login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account? +@@ -704,7 +704,7 @@ + notification.email.wasReturnedByReviewer={0} (view at {1}) was returned by the curator of {2} (view at {3}). + notification.email.wasPublished={0} (view at {1}) was published in {2} (view at {3}). + notification.email.publishFailedPidReg={0} (view at {1}) in {2} (view at {3}) could not be published due to a failure to register, or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen. +-notification.email.closing=\n\nYou may contact us for support at {0}.\n\nThank you,\n{1} ++notification.email.closing=\n\nYou may contact us for support at https://site.uit.no/dataverseno/support/ .\n\nThank you,\n{1} + notification.email.closing.html=

<br/><br/>You may contact us for support at {0}.<br/><br/>Thank you,<br/>
{1} + notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}). + notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}). +@@ -876,7 +876,7 @@ + dataverse.results.btn.addData.newDataset=New Dataset + dataverse.results.dialog.addDataGuest.header=Add Data + dataverse.results.dialog.addDataGuest.msg=Log in to create a dataverse or add a dataset. +-dataverse.results.dialog.addDataGuest.msg.signup=Sign up or log in to create a dataverse or add a dataset. ++dataverse.results.dialog.addDataGuest.msg.signup=You need to Log In to add a dataset. + dataverse.results.dialog.addDataGuest.signup.title=Sign Up for a Dataverse Account + dataverse.results.dialog.addDataGuest.login.title=Log into your Dataverse Account + dataverse.results.types.dataverses=Dataverses +@@ -1657,7 +1657,7 @@ + file.more.information.link=Link to more file information for + file.requestAccess=Request Access + file.requestAccess.dialog.msg=You need to Log In to request access. +-file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access. ++file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access. + file.accessRequested=Access Requested + file.ingestInProgress=Ingest in progress... + file.dataFilesTab.metadata.header=Metadata From 15e7dd30763e9d91f860b86e75914c09f1a26b41 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 12 Jul 2022 11:04:22 +0000 Subject: [PATCH 020/354] patch uit customisation --- distros/dataverse.no/init.d/0001-bundle.sh | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 distros/dataverse.no/init.d/0001-bundle.sh diff --git a/distros/dataverse.no/init.d/0001-bundle.sh b/distros/dataverse.no/init.d/0001-bundle.sh new file mode 100644 index 0000000..77eba03 --- /dev/null +++ b/distros/dataverse.no/init.d/0001-bundle.sh @@ -0,0 +1,5 @@ +apt-get install patch -y +curl -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch +patch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/propertyFiles/Bundle.properties $DOCROOT_DIR/Bundle.properties.patch + + From fdb9fbfbf137c4c40c820fca4f2b104fb373cf37 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Thu, 14 Jul 2022 09:41:53 +0000 Subject: [PATCH 021/354] updated afiliation script, require python3.7 or more --- .../dataverse.no/init.d/affiliations/affiliation2data.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/distros/dataverse.no/init.d/affiliations/affiliation2data.py b/distros/dataverse.no/init.d/affiliations/affiliation2data.py index 232e499..c889325 100644 --- a/distros/dataverse.no/init.d/affiliations/affiliation2data.py +++ b/distros/dataverse.no/init.d/affiliations/affiliation2data.py @@ -1,4 +1,5 @@ import pandas as pd +import json # id | dvno_affiliation | dvno_group_name | dvno_email_level #-----+-------------------------------------+------------------+------------------ @@ -20,7 +21,10 @@ def reload_affiliations(loc): #print(str(affiliations.iloc[[i]]['dvno_affiliation'].values[0])) dvno_email_level = len(str(affiliations.iloc[[i]]['dvno_group_name']).split('.')) #print(subdomains) - sql = "insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('%s', '%s', '%s', '%s');" % (affiliations.iloc[[i]]['id'].values[0], 
affiliations.iloc[[i]]['dvno_affiliation'].values[0], affiliations.iloc[[i]]['dvno_group_name'].values[0], dvno_email_level) + affiliation = affiliations.iloc[[i]]['dvno_affiliation'].values[0] + affiliation = str(affiliation).replace("'", "\'\'") + #affiliation = "q['%s']" % affiliation + sql = "insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('%s', '%s', '%s', '%s');" % (affiliations.iloc[[i]]['id'].values[0], affiliation, affiliations.iloc[[i]]['dvno_group_name'].values[0], dvno_email_level) print(sql) return From 0fef3ccea307680734b7c7d55a261293d053b230 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Thu, 14 Jul 2022 09:53:10 +0000 Subject: [PATCH 022/354] init.d script for afiliation triggeer --- distros/dataverse.no/init.d/202-trigger.sh | 5 +++++ 1 file changed, 5 insertions(+) create mode 100755 distros/dataverse.no/init.d/202-trigger.sh diff --git a/distros/dataverse.no/init.d/202-trigger.sh b/distros/dataverse.no/init.d/202-trigger.sh new file mode 100755 index 0000000..cb45abe --- /dev/null +++ b/distros/dataverse.no/init.d/202-trigger.sh @@ -0,0 +1,5 @@ +#!/bin/bash +python3 ./affiliations/affiliation2data.py > /tmp/affiliations.sql +export PGPASSWORD=`cat /secrets/db/password` +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/extratrigger.sql +psql -U dataverse dataverse -h postgres -f /tmp/affiliations.sql From 3454b89066890689f5cb31dc657bc5b143974f44 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Wed, 20 Jul 2022 09:44:17 +0000 Subject: [PATCH 023/354] Added afiliation Curl for feide field --- distros/dataverse.no/init.d/023-afilliation.sh | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 distros/dataverse.no/init.d/023-afilliation.sh diff --git a/distros/dataverse.no/init.d/023-afilliation.sh b/distros/dataverse.no/init.d/023-afilliation.sh new file mode 100644 index 0000000..1510d7a --- /dev/null +++ b/distros/dataverse.no/init.d/023-afilliation.sh @@ -0,0 +1,3 @@ +#!/bin/bash +curl -X PUT -d "affiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationAttribute + From aa58d547f36899c8e68fe423e78ee1d8df2b1ac5 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Wed, 20 Jul 2022 09:52:04 +0000 Subject: [PATCH 024/354] updatetrigger.sql --- .../init.d/affiliations/updatetrigger.sql | 22 +++++++++++++++++++ .../init.d/affiliations/updatetrigger.sql.2 | 2 ++ 2 files changed, 24 insertions(+) create mode 100644 distros/dataverse.no/init.d/affiliations/updatetrigger.sql create mode 100644 distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2 diff --git a/distros/dataverse.no/init.d/affiliations/updatetrigger.sql b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql new file mode 100644 index 0000000..95e6530 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql @@ -0,0 +1,22 @@ +CREATE TABLE IF NOT EXISTS public.dvnoaffiliations ( + id bigint, + dvno_affiliation character varying(255) DEFAULT NULL::character varying, + dvno_group_name character varying(255) DEFAULT NULL::character varying, + dvno_email_level integer DEFAULT 2 +); + +ALTER TABLE public.dvnoaffiliations OWNER TO dataverse; + +CREATE OR REPLACE FUNCTION public.affiliationupdate() RETURNS trigger + LANGUAGE plpgsql + AS $$ + +BEGIN +IF NEW.actionsubtype='login' THEN + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where 
dvno_group_name=substring(email, '\S+\W(\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1); + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1); +END IF +RETURN NULL; +END; +$$ +CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.actionlogrecord FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); diff --git a/distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2 b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2 new file mode 100644 index 0000000..c1307ec --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2 @@ -0,0 +1,2 @@ +DROP TRIGGER affiliation_trigger_actionlog on public.actionlogrecord; +CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.actionlogrecord FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); From b3c9176edde2e99e54ee3219163f5c26e5431267 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 22 Jul 2022 08:04:25 +0000 Subject: [PATCH 025/354] new trigger solution --- .../affiliations/builtinuser_trigger.sql | 20 +++++++++++++++++++ .../affiliations/builtinuser_trigger.sql.2 | 1 + 2 files changed, 21 insertions(+) create mode 100644 distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql create mode 100644 distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2 diff --git a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql new file mode 100644 index 0000000..8c5ecb8 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql @@ -0,0 +1,20 @@ +CREATE TABLE IF NOT EXISTS public.dvnoaffiliations ( + id bigint, + dvno_affiliation character varying(255) DEFAULT NULL::character varying, + dvno_group_name character varying(255) DEFAULT NULL::character varying, + dvno_email_level integer DEFAULT 2 +); + +ALTER TABLE public.dvnoaffiliations OWNER TO dataverse; + +CREATE OR REPLACE FUNCTION public.affiliationupdate() RETURNS trigger + LANGUAGE plpgsql + AS $$ + +BEGIN + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1); + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1); +RETURN NULL; +END; +$$ +CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.builtinuser FOR EACH ROW EXECUTE PROCEDURE 
public.affiliationupdate(); diff --git a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2 b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2 new file mode 100644 index 0000000..cb2835b --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2 @@ -0,0 +1 @@ +CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.builtinuser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); From 4136c07a36bf462ec0c9cb712447a7e917c1286e Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 22 Jul 2022 09:17:39 +0000 Subject: [PATCH 026/354] updated docker-compose.yaml --- distros/dataverse.no/docker-compose.yaml | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml index a625eba..9afb711 100755 --- a/distros/dataverse.no/docker-compose.yaml +++ b/distros/dataverse.no/docker-compose.yaml @@ -47,15 +47,18 @@ services: - "POSTGRES_PASSWORD" - "POSTGRES_PORT" volumes: - - ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down - # - /mntblob/database-data-prod:/var/lib/postgresql/data/ - # - /extdisk/database-data-demo:/var/lib/postgresql/data/ + #- ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down + - ${POSTGRESTMP}/:/mnttmp/ + - /extdisk/database-data-prod:/var/lib/postgresql/data/ + # - /extdisk/database-data-demo:/var/lib/postgresql/data/ + shibboleth: networks: - traefik - image: ${DOCKER_HUB}/shibboleth:3.1.0 + image: shibboleth:3.3.0 + #image: ${DOCKER_HUB}/shibboleth:3.1.0 container_name: shibboleth privileged: true ports: @@ -170,7 +173,8 @@ services: dataverse: networks: - traefik - image: coronawhy/dataverse:${VERSION} + image: ${DOCKER_HUB}/dataverse:${VERSION} + #image: coronawhy/dataverse:${VERSION} container_name: dataverse privileged: true user: @@ -250,13 +254,14 @@ services: - "RSERVE_USER=rserve" - "RSERVE_PASSWORD=rserve" - "JVM_OPTS='-Xmx4g -Xms4g -XX:MaxPermSize=4g -XX:PermSize=4g'" + - "WEBANALYTICSON" depends_on: - postgres - solr volumes: - ${CONFIGURATION_PATH}/secrets:/secrets - ${LOCAL_STORAGE}/data:/data - - ${DOCROOT}/docroot:/opt/docroot + - ${DOCROOT}/docroot:/opt/payara/docroot - ./configs/domain.xml:/opt/payara/domain.xml - ./init.d:/opt/payara/init.d - /mnt:/mnt From 78e9a8efca816f50716a43082fa3343ae095112f Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 22 Jul 2022 09:21:58 +0000 Subject: [PATCH 027/354] update docker compose --- distros/dataverse.no/docker-compose.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml index 9afb711..f7eabae 100755 --- a/distros/dataverse.no/docker-compose.yaml +++ b/distros/dataverse.no/docker-compose.yaml @@ -57,8 +57,8 @@ services: shibboleth: networks: - traefik - image: shibboleth:3.3.0 - #image: ${DOCKER_HUB}/shibboleth:3.1.0 + #image: shibboleth:3.3.0 + image: ${DOCKER_HUB}/shibboleth:3.3.0 container_name: shibboleth privileged: true ports: From 1367d15d49f9b55375be76c003e8910085a604a1 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 25 Jul 2022 09:17:05 +0200 Subject: [PATCH 028/354] Update docker-compose.yaml fix postgres location --- distros/dataverse.no/docker-compose.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff 
--git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml index 9afb711..f1552a7 100755 --- a/distros/dataverse.no/docker-compose.yaml +++ b/distros/dataverse.no/docker-compose.yaml @@ -47,10 +47,10 @@ services: - "POSTGRES_PASSWORD" - "POSTGRES_PORT" volumes: - #- ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down + - ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down - ${POSTGRESTMP}/:/mnttmp/ - - /extdisk/database-data-prod:/var/lib/postgresql/data/ - # - /extdisk/database-data-demo:/var/lib/postgresql/data/ + #- /extdisk/database-data-prod:/var/lib/postgresql/data/ + #- /extdisk/database-data-demo:/var/lib/postgresql/data/ From 6743a2c9ff1005e8d685f5ad119c2eccd24ee68a Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Mon, 25 Jul 2022 07:35:08 +0000 Subject: [PATCH 029/354] fixed init.d --- distros/dataverse.no/init.d/005-reindex.sh | 3 +++ distros/dataverse.no/init.d/100-analytics.sh | 0 distros/dataverse.no/init.d/201-bundle.sh | 7 +++++++ distros/dataverse.no/init.d/202-trigger.sh | 5 +++-- .../init.d/affiliations/.updatetrigger.sql.swp | Bin 0 -> 12288 bytes .../init.d/affiliations/extratrigger.sql | 9 --------- .../init.d/affiliations/extratrigger.sql.2 | 1 + .../init.d/affiliations/extratrigger.sql.3 | 12 ++++++++++++ 8 files changed, 26 insertions(+), 11 deletions(-) create mode 100755 distros/dataverse.no/init.d/005-reindex.sh mode change 100644 => 100755 distros/dataverse.no/init.d/100-analytics.sh create mode 100755 distros/dataverse.no/init.d/201-bundle.sh create mode 100644 distros/dataverse.no/init.d/affiliations/.updatetrigger.sql.swp create mode 100644 distros/dataverse.no/init.d/affiliations/extratrigger.sql.2 create mode 100644 distros/dataverse.no/init.d/affiliations/extratrigger.sql.3 diff --git a/distros/dataverse.no/init.d/005-reindex.sh b/distros/dataverse.no/init.d/005-reindex.sh new file mode 100755 index 0000000..465d5b0 --- /dev/null +++ b/distros/dataverse.no/init.d/005-reindex.sh @@ -0,0 +1,3 @@ +# Reindex all datasets +curl http://localhost:8080/api/admin/index/clear +curl http://localhost:8080/api/admin/index diff --git a/distros/dataverse.no/init.d/100-analytics.sh b/distros/dataverse.no/init.d/100-analytics.sh old mode 100644 new mode 100755 diff --git a/distros/dataverse.no/init.d/201-bundle.sh b/distros/dataverse.no/init.d/201-bundle.sh new file mode 100755 index 0000000..8e30c1b --- /dev/null +++ b/distros/dataverse.no/init.d/201-bundle.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +/usr/bin/apt-get install patch -y +/usr/bin/curl -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch +/usr/bin/patch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/propertyFiles/Bundle.properties $DOCROOT_DIR/Bundle.properties.patch + + diff --git a/distros/dataverse.no/init.d/202-trigger.sh b/distros/dataverse.no/init.d/202-trigger.sh index cb45abe..0d371ea 100755 --- a/distros/dataverse.no/init.d/202-trigger.sh +++ b/distros/dataverse.no/init.d/202-trigger.sh @@ -1,5 +1,6 @@ #!/bin/bash -python3 ./affiliations/affiliation2data.py > /tmp/affiliations.sql +python3 ${INIT_SCRIPTS_FOLDER}/affiliations/affiliation2data.py > /tmp/affiliations.sql export PGPASSWORD=`cat /secrets/db/password` -psql -U dataverse dataverse -h postgres -f 
${INIT_SCRIPTS_FOLDER}/affiliations/extratrigger.sql +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/builtinuser_trigger.sql +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/builtinuser_trigger.sql.2 psql -U dataverse dataverse -h postgres -f /tmp/affiliations.sql diff --git a/distros/dataverse.no/init.d/affiliations/.updatetrigger.sql.swp b/distros/dataverse.no/init.d/affiliations/.updatetrigger.sql.swp new file mode 100644 index 0000000000000000000000000000000000000000..4db26fc7fe8f54dfabee21e2c71f00d4a80c7fab GIT binary patch literal 12288 zcmeI2&2G~`5XZM1`4&hVVM;3{QQM>~Dpaj1;igWA6rAGNltZP+u{Y@o$2RLtN-ppY zJP#6_xNzcaK;ppIag&fBK@Uh2jitXWZ)QK{H#;jup7#3l+Edsrt~0!DGNwvj+B=sS zTe!!V3`3PP|L#&eR%>49zT~p#Im#Ju8S`Qg&h~qg3|Q(4RT^7m-AbN_l?>y_57@mV z0-=gt$?0^2FB~PpATISI?8}uD-7d}+zw&1%p?4xc1m+3!RcCGcn*CsNBTeiDMRJ6d}qIZ`v5AS5pg_&peN)IAH1c(3;AOb{y z2oM1xKm>>Y5g-CY-~tkGJjU+g_x^XNw)$7MxLd@0lviH0}5VofLfQ}4%khD%U#&CMVB~P1MT^aVHR^asbUfgeEX{-pk zdERq`zY4kI=F0I={`eH{E8}}1R{$sQ;2h>6fPBn-?yA{If4#ub1c`^jOC0V9^mS?w z$-De@)RH{%9hc{SHpF#5&fyK0nR-7Pp^@{WR`fv)Bfgiz2#X*G7(tw=J%m^}QYq0x zN<4PK7d@e11B3Jrh2xI{kr+fTNqi#~RouxS@rl}v+wXzNs+bCPLsU)6HN3>0{unE~oz?fD~4ggEruaDDsF0mR% zMq<-|aoIU$aJFkw>}h7Td_LelhD6~$mT;g~G`nuWX1e~_=(s12#LbF!14j;%?A+Yi zDvUcNr^L?AoYCrJtf|b0IlFTVCOl{h%o4SFMs&oYhT&*fc-GaLR`cwDn Date: Mon, 25 Jul 2022 07:46:38 +0000 Subject: [PATCH 030/354] removed unused script --- distros/dataverse.no/init.d/0001-bundle.sh | 5 ----- .../dataverse.no/init.d/012-minio-bucket1.sh | 17 ----------------- distros/dataverse.no/init.d/105-reindex.sh | 3 --- 3 files changed, 25 deletions(-) delete mode 100644 distros/dataverse.no/init.d/0001-bundle.sh delete mode 100755 distros/dataverse.no/init.d/012-minio-bucket1.sh delete mode 100755 distros/dataverse.no/init.d/105-reindex.sh diff --git a/distros/dataverse.no/init.d/0001-bundle.sh b/distros/dataverse.no/init.d/0001-bundle.sh deleted file mode 100644 index 77eba03..0000000 --- a/distros/dataverse.no/init.d/0001-bundle.sh +++ /dev/null @@ -1,5 +0,0 @@ -apt-get install patch -y -curl -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch -patch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/propertyFiles/Bundle.properties $DOCROOT_DIR/Bundle.properties.patch - - diff --git a/distros/dataverse.no/init.d/012-minio-bucket1.sh b/distros/dataverse.no/init.d/012-minio-bucket1.sh deleted file mode 100755 index 6fe5e7b..0000000 --- a/distros/dataverse.no/init.d/012-minio-bucket1.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -# MinIO bucket activation -# https://guides.dataverse.org/en/latest/installation/config.html#id87 -if [ "${minio_label_1}" ]; then - asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.type\=s3" - asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.label\=${minio_label_1}" - asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.bucket-name\=${minio_bucket_1}" - asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.download-redirect\=false" -# asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.upload-redirect=true" -# asadmin 
--user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.ingestsizelimit=13107200" - asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.url-expiration-minutes\=120" - asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.connection-pool-size\=4096" - asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.profile\=${minio_profile_1}" - asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.custom-endpoint-url\=${minio_custom_endpoint}" - asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.path-style-access\=true" -fi diff --git a/distros/dataverse.no/init.d/105-reindex.sh b/distros/dataverse.no/init.d/105-reindex.sh deleted file mode 100755 index 465d5b0..0000000 --- a/distros/dataverse.no/init.d/105-reindex.sh +++ /dev/null @@ -1,3 +0,0 @@ -# Reindex all datasets -curl http://localhost:8080/api/admin/index/clear -curl http://localhost:8080/api/admin/index From 59f35fbe936a3cf770669284a6fde354fde64651 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Mon, 25 Jul 2022 08:12:26 +0000 Subject: [PATCH 031/354] updated affiliations/builtinuser_trigger.sql --- distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql | 1 - 1 file changed, 1 deletion(-) diff --git a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql index 8c5ecb8..6c8fd47 100644 --- a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql +++ b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql @@ -17,4 +17,3 @@ BEGIN RETURN NULL; END; $$ -CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.builtinuser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); From cf1ee5f7dcfc6445fdb7839fb9223d6703df0ffb Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Wed, 27 Jul 2022 07:37:03 +0000 Subject: [PATCH 032/354] Curl comment for Feide affiliation option --- distros/dataverse.no/init.d/023-afilliation.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/distros/dataverse.no/init.d/023-afilliation.sh b/distros/dataverse.no/init.d/023-afilliation.sh index 1510d7a..51fcd94 100644 --- a/distros/dataverse.no/init.d/023-afilliation.sh +++ b/distros/dataverse.no/init.d/023-afilliation.sh @@ -1,3 +1,4 @@ #!/bin/bash curl -X PUT -d "affiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationAttribute +curl -X PUT -d True http://localhost:8080/api/admin/settings/ShibAffiliationFeide From 7992e09bbc62c337872682e424fab302bfef0892 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Wed, 27 Jul 2022 09:13:20 +0000 Subject: [PATCH 033/354] fixed affiliation curl --- distros/dataverse.no/init.d/023-afilliation.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distros/dataverse.no/init.d/023-afilliation.sh b/distros/dataverse.no/init.d/023-afilliation.sh index 51fcd94..8921267 100644 --- a/distros/dataverse.no/init.d/023-afilliation.sh +++ b/distros/dataverse.no/init.d/023-afilliation.sh @@ -1,4 +1,4 @@ #!/bin/bash curl -X PUT -d "affiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationAttribute -curl -X PUT -d 
True http://localhost:8080/api/admin/settings/ShibAffiliationFeide +curl -X PUT -d True http://localhost:8080/api/admin/settings/:ShibAffiliationFeide From a6c6115215e89a61e7a56151b0b26d04de64101c Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 5 Aug 2022 09:42:42 +0000 Subject: [PATCH 034/354] Added script to backup and delete old logs from database table actionlogrecord --- .../init.d/cronjob/actionlog/actionlogrecord.sh | 7 +++++++ .../init.d/cronjob/actionlog/actionlogrecord.sql | 2 ++ 2 files changed, 9 insertions(+) create mode 100644 distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh create mode 100644 distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql diff --git a/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh new file mode 100644 index 0000000..1acad6e --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh @@ -0,0 +1,7 @@ +#!/bin/bash +export PGPASSWORD=`cat /secrets/db/password` +pg_dump -U dataverse dataverse -h postgres -t actionlogrecord > /tmp/actionlogrecord$(date +'%Y%m%d').dump +gzip --force /tmp/actionlogrecord$(date +'%Y%m%d').dump +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/actionlog/actionlogrecord.sql + + diff --git a/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql new file mode 100644 index 0000000..9a90101 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql @@ -0,0 +1,2 @@ +DELETE FROM actionlogrecord WHERE starttime < current_timestamp - interval '90 days'; + From 0c6f36354ddfee3b0af7fbcab2856cc320e5ceaf Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 5 Aug 2022 10:36:05 +0000 Subject: [PATCH 035/354] updatted database log loccation --- .../dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh index 1acad6e..4cfd6fc 100644 --- a/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh +++ b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh @@ -1,7 +1,7 @@ #!/bin/bash export PGPASSWORD=`cat /secrets/db/password` -pg_dump -U dataverse dataverse -h postgres -t actionlogrecord > /tmp/actionlogrecord$(date +'%Y%m%d').dump -gzip --force /tmp/actionlogrecord$(date +'%Y%m%d').dump +pg_dump -U dataverse dataverse -h postgres -t actionlogrecord > ${DATA_DIR}/actionlogrecord$(date +'%Y%m%d').dump +gzip --force ${DATA_DIR}/actionlogrecord$(date +'%Y%m%d').dump psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/actionlog/actionlogrecord.sql From 60e49227a2c7a68ef8c21a31771a2bd7e5c18b46 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Mon, 22 Aug 2022 07:35:37 +0000 Subject: [PATCH 036/354] added persistant index functionality --- distros/dataverse.no/docker-compose.yaml | 2 +- distros/dataverse.no/init.d/005-reindex.sh | 3 --- distros/dataverse.no/init.d/023-afilliation.sh | 4 ++-- distros/dataverse.no/migration/create-backup-db.sh | 6 +++++- 4 files changed, 8 insertions(+), 7 deletions(-) delete mode 100755 distros/dataverse.no/init.d/005-reindex.sh diff --git a/distros/dataverse.no/docker-compose.yaml 
b/distros/dataverse.no/docker-compose.yaml index 6a67076..23d161f 100755 --- a/distros/dataverse.no/docker-compose.yaml +++ b/distros/dataverse.no/docker-compose.yaml @@ -92,7 +92,7 @@ services: - "SOLR_JAVA_MEM=-Xms4g -Xmx4g" - "SOLR_OPTS=-Dlog4j2.formatMsgNoLookups=true" volumes: - - solr-data:/opt/solr/server/solr/collection1/data + - solr-data:/var/solr/data - ./configs/schema.xml:/var/solr/data/collection1/conf/schema.xml labels: - "traefik.enable=true" diff --git a/distros/dataverse.no/init.d/005-reindex.sh b/distros/dataverse.no/init.d/005-reindex.sh deleted file mode 100755 index 465d5b0..0000000 --- a/distros/dataverse.no/init.d/005-reindex.sh +++ /dev/null @@ -1,3 +0,0 @@ -# Reindex all datasets -curl http://localhost:8080/api/admin/index/clear -curl http://localhost:8080/api/admin/index diff --git a/distros/dataverse.no/init.d/023-afilliation.sh b/distros/dataverse.no/init.d/023-afilliation.sh index 8921267..686b39a 100644 --- a/distros/dataverse.no/init.d/023-afilliation.sh +++ b/distros/dataverse.no/init.d/023-afilliation.sh @@ -1,4 +1,4 @@ #!/bin/bash curl -X PUT -d "affiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationAttribute -curl -X PUT -d True http://localhost:8080/api/admin/settings/:ShibAffiliationFeide - +curl -X PUT -d "lastAffiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationOrder +#curl -X PUT -d "firstAffiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationOrder diff --git a/distros/dataverse.no/migration/create-backup-db.sh b/distros/dataverse.no/migration/create-backup-db.sh index efe88b5..4646ce8 100644 --- a/distros/dataverse.no/migration/create-backup-db.sh +++ b/distros/dataverse.no/migration/create-backup-db.sh @@ -3,5 +3,9 @@ docker exec -it postgres bash -c "pg_dump -U dataverse dataverse > /var/lib/p gzip -c /extdisk/database-data-demo/dataverse.dump > "/extdisk/database-data-demo/dataverse$(date +'%Y%m%d').dump.gz" docker exec -it postgres bash -c "createdb -U dataverse dataverse-tmp" docker exec -it postgres bash -c "psql -U dataverse dataverse-tmp -f /var/lib/postgresql/data/dataverse.dump" -cp ./cleanup-database.sql /var/lib/postgresql/data/ +docker cp ./cleanup-database.sql postgres:/var/lib/postgresql/data/ docker exec -it postgres bash -c "psql -U dataverse dataverse-tmp -f /var/lib/postgresql/data/cleanup-database.sql" +docker exec -it postgres bash -c "pg_dump -U dataverse dataverse-tmp > /var/lib/postgresql/data/dataverseCL.dump" +docker exec -it postgres bash -c "dropdb -U dataverse dataverse-tmp" +gzip -c /extdisk/database-data-demo/dataverse.dump > "/extdisk/database-data-demo/dataverseCL$(date +'%Y%m%d').dump.gz" + From 63743ca5b14cb08366ac4078791d40b512bfc8d0 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 23 Aug 2022 09:13:21 +0000 Subject: [PATCH 037/354] Added Make data count --- .../init.d/203-counterprocessor.sh | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 distros/dataverse.no/init.d/203-counterprocessor.sh diff --git a/distros/dataverse.no/init.d/203-counterprocessor.sh b/distros/dataverse.no/init.d/203-counterprocessor.sh new file mode 100644 index 0000000..1ca0075 --- /dev/null +++ b/distros/dataverse.no/init.d/203-counterprocessor.sh @@ -0,0 +1,23 @@ + +#!/bin/bash + +mkdir /opt/payara/counter-processor +cd /opt/payara/counter-processor +wget https://github.com/CDLUC3/counter-processor/archive/v${COUNTERPROSVERSION}.tar.gz -O v${COUNTERPROSVERSION}.tar.gz +tar xvfz v${COUNTERPROSVERSION}.tar.gz +cd 
/opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION} +curl "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country&license_key=${GEOIPLICENSE}&suffix=tar.gz" -o GeoLite2-Country.tar.gz \ + && tar -xzvf GeoLite2-Country.tar.gz \ + && mv GeoLite2-Country_*/GeoLite2-Country.mmdb /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/maxmind_geoip + +wget https://guides.dataverse.org/en/latest/_downloads/a65ffc2dba9f406858591558ae92790c/setup-counter-processor.sh -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/setup-counter-processor.sh +wget https://guides.dataverse.org/en/latest/_downloads/fb16fe67897ad9fb85ec67bce5e6b83e/counter-processor-config.yaml -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/counter-processor-config.yaml + +curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs/mdc' http://localhost:8080/api/admin/settings/:MDCLogPath +curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics +pip3 install -r requirements.txt --ignore-installed PyYAML + + +curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath +curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics + From a87b69f92ebfe656417b18d025e49d3c55e1a495 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 23 Aug 2022 09:27:52 +0000 Subject: [PATCH 038/354] updated docker compose for make data count variable --- distros/dataverse.no/docker-compose.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml index 23d161f..bbe6658 100755 --- a/distros/dataverse.no/docker-compose.yaml +++ b/distros/dataverse.no/docker-compose.yaml @@ -255,6 +255,9 @@ services: - "RSERVE_PASSWORD=rserve" - "JVM_OPTS='-Xmx4g -Xms4g -XX:MaxPermSize=4g -XX:PermSize=4g'" - "WEBANALYTICSON" + - "COUNTERPROSVERSION" + - "GEOIPLICENSE" + - "CONFIG_FILE" depends_on: - postgres - solr From 032ad3fcb26dcb82017dac921b0b06afb471c2ac Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Thu, 25 Aug 2022 15:20:42 +0000 Subject: [PATCH 039/354] updated makedatacount --- distros/dataverse.no/init.d/203-counterprocessor.sh | 2 +- distros/dataverse.no/init.d/cronjob/makedatacount.sh | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 distros/dataverse.no/init.d/cronjob/makedatacount.sh diff --git a/distros/dataverse.no/init.d/203-counterprocessor.sh b/distros/dataverse.no/init.d/203-counterprocessor.sh index 1ca0075..099dbca 100644 --- a/distros/dataverse.no/init.d/203-counterprocessor.sh +++ b/distros/dataverse.no/init.d/203-counterprocessor.sh @@ -16,7 +16,7 @@ wget https://guides.dataverse.org/en/latest/_downloads/fb16fe67897ad9fb85ec67bce curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs/mdc' http://localhost:8080/api/admin/settings/:MDCLogPath curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics pip3 install -r requirements.txt --ignore-installed PyYAML - +export ALLOWED_ENV=year_month curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics diff --git a/distros/dataverse.no/init.d/cronjob/makedatacount.sh 
b/distros/dataverse.no/init.d/cronjob/makedatacount.sh new file mode 100644 index 0000000..ebf6c18 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/makedatacount.sh @@ -0,0 +1,4 @@ +#!/bin/bash +export YEAR_MONTH=$(date '+%Y-%m') +cd /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION} +python3.8 main.py From d98aab4cd937a88c156dfbbbb6fc99a05aac0df7 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 26 Aug 2022 07:55:41 +0000 Subject: [PATCH 040/354] added script to start cron --- startup.sh | 5 +++++ 1 file changed, 5 insertions(+) create mode 100755 startup.sh diff --git a/startup.sh b/startup.sh new file mode 100755 index 0000000..5c8802e --- /dev/null +++ b/startup.sh @@ -0,0 +1,5 @@ +#!/bin/bash +docker-compose up -d +sleep 10 +docker exec dataverse /etc/init.d/cron restart + From b2add71ebcc54022c686ea25604903c4bab77cba Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 26 Aug 2022 08:59:26 +0000 Subject: [PATCH 041/354] documentation about environmental variables --- doc/env.rst | 105 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 105 insertions(+) create mode 100644 doc/env.rst diff --git a/doc/env.rst b/doc/env.rst new file mode 100644 index 0000000..01976dc --- /dev/null +++ b/doc/env.rst @@ -0,0 +1,105 @@ +environment variables +===================== + +main configuration +------------------ + +CONFIGURATION_PATH=/distrib/private +DOCROOT=/distrib +VERSION=5.9.1 +DOCKER_HUB=presacrd4oilmd5ss77y.azurecr.io/dataverseno +SECRETS_DIR="${CONFIGURATION_PATH}/secrets" +POSTGRESTMP=/mnt/tmp/postgres + + +Dataverse database settings +--------------------------- + +DATAVERSE_DB_HOST=postgres +DATAVERSE_DB_USER=dataverse +DATAVERSE_DB_PASSWORD=password +DATAVERSE_DB_NAME=dataverse + +solr +---- + +SOLR_SERVICE_HOST=solr:8983 +SOLR_SERVICE_PORT=8983 +DATAVERSE_URL=localhost:8080 +DATAVERSE_SERVICE_HOST=localhost +LOCAL_STORAGE=/mntblob + +Conter Processor +---------------- + +COUNTERPROSVERSION=0.1.04 +GEOIPLICENSE=licencekey +CONFIG_FILE=counter-processor-config.yaml + +Postgres settings +----------------- + +POSTGRES_USER=dataverse +POSTGRES_PASSWORD= +POSTGRES_SERVER=postgres +POSTGRES_DATABASE=dataverse +POSTGRES_DB=dataverse + +Domain configuration and init folder +------------------------------------ + +hostname=test-docker.dataverse.no +traefikhost=test-docker.dataverse.no +INIT_SCRIPTS_FOLDER=/opt/payara/init.d + +Webhook configuration to bundle external services +------------------------------------------------- + +WEBHOOK=/opt/payara/triggers/external-services.py +#CESSDA=True +#CLARIN=True + +DOI parameters +-------------- + +# https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring +doi_authority=10.21337 +doi_provider=DataCite +doi_username=username +doi_password=password +dataciterestapiurlstring=https\:\/\/api.test.datacite.org +baseurlstring=https\:\/\/mds.test.datacite.org + +AWS settings +------------ + +# https://guides.dataverse.org/en/latest/installation/config.html#id90 +aws_bucket_name=2002-green-dataversenotest1 +aws_s3_profile=cloudian +aws_endpoint_url=https\:\/\/s3-oslo.educloud.no + +AWS UiT +--------- + +aws_uit_bucket_name=p-uit-dataverse01-sth +aws_uit_s3_profile=uit +#aws_endpoint_url=https\:\/\/s3-oslo.educloud.no + +Mail relay +--------- + +# https://guides.dataverse.org/en/latest/developers/troubleshooting.html +system_email= +mailhost=smtp-relay.exemple.com +mailuser=no-reply@dataverse.no 
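These SMTP values end up in the application server's JavaMail session. A minimal sketch of that wiring, assuming the resource name and mail.smtp.* property keys from the upstream Dataverse installation guide rather than this repository's own init script (the asadmin authentication flags follow the pattern used by the other init scripts in this series):

asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-javamail-resource \
    --mailhost "${mailhost}" --mailuser "${mailuser}" --fromaddress "${no_reply_email}" \
    --property "mail.smtp.auth=true:mail.smtp.password=${smtp_password}:mail.smtp.port=${smtp_port}:mail.smtp.socketFactory.port=${socket_port}:mail.smtp.socketFactory.class=javax.net.ssl.SSLSocketFactory" \
    mail/notifyMailSession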
+no_reply_email=no-reply@dataverse.no +smtp_password=password +smtp_port=465 +socket_port=465 + +Federated authentification file +------------------------------- + +# https://guides.dataverse.org/en/latest/installation/shibboleth.html +federated_json_file=/secrets/openid.json + From ca9f73f444d540727fb7e19adadff55b01c848dc Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 26 Aug 2022 09:03:52 +0000 Subject: [PATCH 042/354] forting updated --- doc/env.rst | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/doc/env.rst b/doc/env.rst index 01976dc..d538fa9 100644 --- a/doc/env.rst +++ b/doc/env.rst @@ -5,10 +5,15 @@ main configuration ------------------ CONFIGURATION_PATH=/distrib/private + DOCROOT=/distrib + VERSION=5.9.1 + DOCKER_HUB=presacrd4oilmd5ss77y.azurecr.io/dataverseno + SECRETS_DIR="${CONFIGURATION_PATH}/secrets" + POSTGRESTMP=/mnt/tmp/postgres @@ -16,90 +21,126 @@ Dataverse database settings --------------------------- DATAVERSE_DB_HOST=postgres + DATAVERSE_DB_USER=dataverse + DATAVERSE_DB_PASSWORD=password + DATAVERSE_DB_NAME=dataverse solr ---- SOLR_SERVICE_HOST=solr:8983 + SOLR_SERVICE_PORT=8983 + DATAVERSE_URL=localhost:8080 + DATAVERSE_SERVICE_HOST=localhost + LOCAL_STORAGE=/mntblob Conter Processor ---------------- COUNTERPROSVERSION=0.1.04 + GEOIPLICENSE=licencekey + CONFIG_FILE=counter-processor-config.yaml Postgres settings ----------------- POSTGRES_USER=dataverse + POSTGRES_PASSWORD= + POSTGRES_SERVER=postgres + POSTGRES_DATABASE=dataverse + POSTGRES_DB=dataverse Domain configuration and init folder ------------------------------------ hostname=test-docker.dataverse.no + traefikhost=test-docker.dataverse.no + INIT_SCRIPTS_FOLDER=/opt/payara/init.d Webhook configuration to bundle external services ------------------------------------------------- WEBHOOK=/opt/payara/triggers/external-services.py + #CESSDA=True + #CLARIN=True DOI parameters -------------- # https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring + doi_authority=10.21337 + doi_provider=DataCite + doi_username=username + doi_password=password + dataciterestapiurlstring=https\:\/\/api.test.datacite.org + baseurlstring=https\:\/\/mds.test.datacite.org AWS settings ------------ # https://guides.dataverse.org/en/latest/installation/config.html#id90 + aws_bucket_name=2002-green-dataversenotest1 + aws_s3_profile=cloudian + aws_endpoint_url=https\:\/\/s3-oslo.educloud.no AWS UiT --------- aws_uit_bucket_name=p-uit-dataverse01-sth + aws_uit_s3_profile=uit + #aws_endpoint_url=https\:\/\/s3-oslo.educloud.no Mail relay --------- # https://guides.dataverse.org/en/latest/developers/troubleshooting.html + system_email= + mailhost=smtp-relay.exemple.com + mailuser=no-reply@dataverse.no + no_reply_email=no-reply@dataverse.no + smtp_password=password + smtp_port=465 + socket_port=465 Federated authentification file ------------------------------- # https://guides.dataverse.org/en/latest/installation/shibboleth.html + federated_json_file=/secrets/openid.json From 03abf14439a87dbc8e0daa62898ea1695fae576d Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 26 Aug 2022 09:08:16 +0000 Subject: [PATCH 043/354] added comments --- doc/env.rst | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/doc/env.rst b/doc/env.rst index d538fa9..41678db 100644 --- a/doc/env.rst +++ b/doc/env.rst @@ -1,6 +1,14 @@ -environment variables +Environment variables 
===================== +To run Dataverse as a fully operational production service, data providers should fill in all settings in the configuration file: their domain name, DOI settings, the language of the web interface, the mail relay, external controlled vocabularies and storage. It is also possible to integrate Docker-based custom services into the infrastructure and to create custom software packages serving the needs of specific data providers, for example a separate Shibboleth container for federated authentication, a new data previewer, or a data processing pipeline. + +Configuration +~~~~~~~~~~~~~ + +The configuration is managed in one central place, an environment-variables file called .env, so administrators do not need to modify any other files in the software package. It contains all settings required to deploy Dataverse, for example to set the language of the web interface and to establish connections to the local database, the SOLR search engine, the mail relay or external storage. + + main configuration ------------------
From 6c853dd7902495c2beacef5539db234b2e98c883 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 26 Aug 2022 09:20:52 +0000 Subject: [PATCH 044/354] added installation instructions --- doc/installation.rst | 23 +++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 doc/installation.rst diff --git a/doc/installation.rst b/doc/installation.rst new file mode 100644 index 0000000..a6329b0 --- /dev/null +++ b/doc/installation.rst @@ -0,0 +1,23 @@ +dataverse.no installation +========================= + +Prerequisites : have sudo rights + +'' +sudo su +sudo apt-get update +sudo apt-get install \ + ca-certificates \ + curl \ + gnupg \ + lsb-release +curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg + +echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \ + $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + +sudo apt-get update +sudo apt-get install docker-ce docker-ce-cli containerd.io + +''
From ec1a998334ea7740b2cfe03b4c5369f072fe44ba Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 26 Aug 2022 09:26:45 +0000 Subject: [PATCH 045/354] updated code block --- doc/installation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index a6329b0..67ba13b 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -3,7 +3,7 @@ dataverse.no installation Prerequisites : have sudo rights -'' +`` sudo su sudo apt-get update sudo apt-get install \ @@ -20,4 +20,4 @@ echo \ sudo apt-get update sudo apt-get install docker-ce docker-ce-cli containerd.io -'' +``
From 075a5648ab5c358d02f8336e5279b54289f66344 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 26 Aug 2022 09:30:05 +0000 Subject: [PATCH 046/354] updated code block --- doc/installation.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 67ba13b..2987833 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -3,7 +3,7 @@ dataverse.no installation Prerequisites : have sudo rights -..
sudo su sudo apt-get update sudo apt-get install \ @@ -19,5 +19,4 @@ echo \ sudo apt-get update sudo apt-get install docker-ce docker-ce-cli containerd.io - -`` +.. From b84bf44ab7e6211ef7dca30e76decb794996b547 Mon Sep 17 00:00:00 2001 From: Vyacheslav Tykhonov <4tikhonov@users.noreply.github.com> Date: Fri, 26 Aug 2022 11:31:04 +0200 Subject: [PATCH 047/354] Update installation.rst --- doc/installation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 2987833..b74178b 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -3,7 +3,7 @@ dataverse.no installation Prerequisites : have sudo rights -.. +`` sudo su sudo apt-get update sudo apt-get install \ @@ -19,4 +19,4 @@ echo \ sudo apt-get update sudo apt-get install docker-ce docker-ce-cli containerd.io -.. +`` From 1df43dcfd687375690ac032f2fabb2838003a3ef Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 26 Aug 2022 11:37:29 +0200 Subject: [PATCH 048/354] Update installation.rst --- doc/installation.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/installation.rst b/doc/installation.rst index b74178b..1a74458 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -20,3 +20,4 @@ echo \ sudo apt-get update sudo apt-get install docker-ce docker-ce-cli containerd.io `` +``sudo su`` From bfa67cb354814e3db8f3c91bd36c6abfc50cb943 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 30 Aug 2022 11:13:35 +0200 Subject: [PATCH 049/354] Update installation.rst --- doc/installation.rst | 57 ++++++++++++++++++++++++++++++-------------- 1 file changed, 39 insertions(+), 18 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 1a74458..899bca6 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -2,22 +2,43 @@ dataverse.no installation ========================= Prerequisites : have sudo rights +instal Prerequisites, docker, docker-compose, and git -`` -sudo su -sudo apt-get update -sudo apt-get install \ - ca-certificates \ - curl \ - gnupg \ - lsb-release -curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg - -echo \ - "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \ - $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null - -sudo apt-get update -sudo apt-get install docker-ce docker-ce-cli containerd.io -`` -``sudo su`` +.. code-block:: bash + + sudo su + apt-get update + apt-get install \ + ca-certificates \ + curl \ + gnupg \ + lsb-release + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg + + echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \ + $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + + apt-get update + apt-get install docker-ce docker-ce-cli containerd.io + curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose + chmod +x /usr/local/bin/docker-compose + apt-get install git + mkdir /distrib + cd /distrib + + +Clone the git + +.. 
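Before cloning, it is worth verifying that the packages installed above actually work; these are standard Docker smoke tests with no assumptions beyond the packages just installed:

.. code-block:: bash

   docker --version
   docker-compose --version
   # Pulls and runs a throwaway container to confirm the daemon works end to end.
   docker run --rm hello-world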
code-block:: bash + + git clone https://github.com/IQSS/dataverse-docker + cd /distrib/dataverse-docker/ + git branche dataverse.no + docker network create traefik + cp .env_sample .env + +the folowings needs to be changed in .env + +hostname=demo.dataverse.no +traefikhost=demo.dataverse.no From a0c032f127d6808dc2ee5c5675f971d66393a6bb Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 30 Aug 2022 11:14:27 +0200 Subject: [PATCH 050/354] Update installation.rst --- doc/installation.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/installation.rst b/doc/installation.rst index 899bca6..cf1e523 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -40,5 +40,7 @@ Clone the git the folowings needs to be changed in .env +.. code-block:: bash + hostname=demo.dataverse.no -traefikhost=demo.dataverse.no +traefikhost=demo.dataverse.n From ae06380c3029bf0f1b71b699c130e06e73704e88 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 30 Aug 2022 11:14:48 +0200 Subject: [PATCH 051/354] Update installation.rst --- doc/installation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index cf1e523..2b28c7c 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -42,5 +42,5 @@ the folowings needs to be changed in .env .. code-block:: bash -hostname=demo.dataverse.no -traefikhost=demo.dataverse.n + hostname=demo.dataverse.no + traefikhost=demo.dataverse.n From 902619cf6a83273467c8820ea8c3a44ed667f926 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 30 Aug 2022 11:15:49 +0200 Subject: [PATCH 052/354] Update .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index a8adb79..09c4d5d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ dataverse.war .env +.gitignore #Ignoring IDE files .idea From 9a7aa8120c2a4624702c88b007a778c9d92c8a33 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 30 Aug 2022 14:20:40 +0200 Subject: [PATCH 053/354] Update installation.rst --- doc/installation.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/installation.rst b/doc/installation.rst index 2b28c7c..3a948b7 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -44,3 +44,10 @@ the folowings needs to be changed in .env hostname=demo.dataverse.no traefikhost=demo.dataverse.n + + + +Conjob to automaticaly restart dataverse +---------------------------------------- + +''*/3 * * * * /bin/bash /root/restart-dataverse.sh https://test-docker.dataverse.no'' From 46d301e7d09db6487487245a54b0e76de746cdbf Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 30 Aug 2022 14:21:13 +0200 Subject: [PATCH 054/354] Update installation.rst --- doc/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/installation.rst b/doc/installation.rst index 3a948b7..7226cc9 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -50,4 +50,4 @@ the folowings needs to be changed in .env Conjob to automaticaly restart dataverse ---------------------------------------- -''*/3 * * * * /bin/bash /root/restart-dataverse.sh https://test-docker.dataverse.no'' +``*/3 * * * * /bin/bash /root/restart-dataverse.sh https://test-docker.dataverse.no`` From 7e7f5ff98693193ca915fd5ced9da405cff808e6 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 31 Aug 2022 10:27:04 +0200 Subject: [PATCH 055/354] Update installation.rst --- doc/installation.rst | 115 ++++++++++++++++++++++++++++++++++++++----- 1 file changed, 104 insertions(+), 11 deletions(-) diff 
--git a/doc/installation.rst b/doc/installation.rst index 7226cc9..71ee0bd 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -2,7 +2,7 @@ dataverse.no installation ========================= Prerequisites : have sudo rights -instal Prerequisites, docker, docker-compose, and git +install Prerequisites, docker, docker-compose, and git .. code-block:: bash @@ -24,30 +24,123 @@ instal Prerequisites, docker, docker-compose, and git curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose chmod +x /usr/local/bin/docker-compose apt-get install git - mkdir /distrib - cd /distrib + +Dataverse root folder +--------------------- + +defined in ``CONFIGURATION_PATH`` and ``DOCROOT`` default : ``/distrib/`` + +.. code-block:: bash + + export DISTRIB=/distrib + export CONFIGURATION_PATH=$DISTRIB/private + mkdir $DISTRIB + mkdir $CONFIGURATION_PATH + cd $DISTRIB + + Clone the git +------------- .. code-block:: bash git clone https://github.com/IQSS/dataverse-docker - cd /distrib/dataverse-docker/ - git branche dataverse.no - docker network create traefik + cd $DISTRIB/dataverse-docker/ + git branch dataverse.no + cp $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH cp .env_sample .env + docker network create traefik + +The following variables need to be changed in .env + +.. code-block:: bash + + hostname=dataverse.no + traefikhost=dataverse.no + +main configuration -the folowings needs to be changed in .env +.. code-block:: bash + + DISTRIB=/distrib + CONFIGURATION_PATH=/distrib/private + +Solr + +.. code-block:: bash + + LOCAL_STORAGE=/mntblob + +Counter Processor + +.. code-block:: bash + +GEOIPLICENSE=licencekey + +Postgres settings .. code-block:: bash - hostname=demo.dataverse.no - traefikhost=demo.dataverse.n + POSTGRES_PASSWORD=password + + +DOI parameters +.. 
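The DataCite credentials set in the block just below can be smoke-tested against DataCite's test API before Dataverse registers any DOIs; a sketch, where ``username`` and ``password`` stand for your DataCite test account:

.. code-block:: bash

   # Expect HTTP 200 and a JSON body if the test account is accepted.
   curl -s -u username:password https://api.test.datacite.org/dois | head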
code-block:: bash + doi_authority=10.21337 + doi_username=username + doi_password=password + +Certificates installation +------------------------- -Conjob to automaticaly restart dataverse ----------------------------------------- +Request the certificates from the correct authority + +dataverse.pem order: +local in file $[hostmame].pem +Intermediate in file sectigo-intermediate.pem +Root in file sectigo-intermediate.pem +TODO : split and cat command for automatisation + + + +certificates should be put in ´´$CONFIGURATION_PATH/configuration/files´´ there are 2 files a .pem file and a .key file + +The name of the certificates files should match the name in ´´$CONFIGURATION_PATH/configuration/files/certificate.toml´´ + +Check the certificates with ´´curl -placeholder hostname ´´ + + +DOCROOT +------- + +The appropriate docroot folder needs to be copied in ``$DISTRIB/docroot`` +for example ´´rsync -arzvP --rsh=ssh ./docroot [ServerName]:/distrib/docroot´´ + + + +Apache and shibboleth configuration +----------------------------------- +Apache configuration + +Change domain name + +Set up shibboleth + +Copy keyen.sh comand + + + + + +Cronjob to automatically restart dataverse +------------------------------------------ + +NB:remeber to stop it if you want it stoped :) ``*/3 * * * * /bin/bash /root/restart-dataverse.sh https://test-docker.dataverse.no`` + + From 70c08ef7e9f939ee222cd714b3c238eda65948f2 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 31 Aug 2022 10:56:40 +0200 Subject: [PATCH 056/354] Update installation.rst --- doc/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/installation.rst b/doc/installation.rst index 71ee0bd..fbeeb11 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -46,7 +46,7 @@ Clone the git .. 
code-block:: bash - git clone https://github.com/IQSS/dataverse-docker + git clone https://github.com/DataverseNO/dataverse-docker.git cd $DISTRIB/dataverse-docker/ git branch dataverse.no cp $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH From 90e816696aaedf941d7e8e4740bf2f70c7dc47f9 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 31 Aug 2022 11:07:22 +0200 Subject: [PATCH 057/354] Update installation.rst --- doc/installation.rst | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index fbeeb11..03c8918 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -48,9 +48,7 @@ Clone the git git clone https://github.com/DataverseNO/dataverse-docker.git cd $DISTRIB/dataverse-docker/ - git branch dataverse.no - cp $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH - cp .env_sample .env + git checkout dataverse.no docker network create traefik The following variables need to be changed in .env @@ -103,7 +101,7 @@ dataverse.pem order: local in file $[hostmame].pem Intermediate in file sectigo-intermediate.pem Root in file sectigo-intermediate.pem -TODO : split and cat command for automatisation +To make the certificate pem file´´cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem´´ From 95e084b1417fd8b7b26c914ecb0b16bf89cb9e26 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 31 Aug 2022 11:12:55 +0200 Subject: [PATCH 058/354] Update installation.rst --- doc/installation.rst | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 03c8918..639eb89 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -49,6 +49,8 @@ Clone the git git clone https://github.com/DataverseNO/dataverse-docker.git cd $DISTRIB/dataverse-docker/ git checkout dataverse.no + cp -r $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH + cp .env_sample .env docker network create traefik The following variables need to be changed in .env @@ -101,22 +103,22 @@ dataverse.pem order: local in file $[hostmame].pem Intermediate in file sectigo-intermediate.pem Root in file sectigo-intermediate.pem -To make the certificate pem file´´cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem´´ +To make the certificate pem file ``cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem`` -certificates should be put in ´´$CONFIGURATION_PATH/configuration/files´´ there are 2 files a .pem file and a .key file +certificates should be put in ``$CONFIGURATION_PATH/configuration/files`` there are 2 files a .pem file and a .key file -The name of the certificates files should match the name in ´´$CONFIGURATION_PATH/configuration/files/certificate.toml´´ +The name of the certificates files should match the name in ``$CONFIGURATION_PATH/configuration/files/certificate.toml`` -Check the certificates with ´´curl -placeholder hostname ´´ +Check the certificates with ``curl -placeholder hostname`` DOCROOT ------- The appropriate docroot folder needs to be copied in ``$DISTRIB/docroot`` -for example ´´rsync -arzvP --rsh=ssh ./docroot [ServerName]:/distrib/docroot´´ +for example ``rsync -arzvP --rsh=ssh ./docroot [ServerName]:/distrib/docroot`` From 95dec9b32c6a61dbc1141c562e96f4d1bcfcde5d Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Wed, 31 Aug 2022 09:34:02 +0000 Subject: [PATCH 059/354] updated .env_sample --- .env_sample | 90 +++++++++++++++++++++++++---------------------------- 1 file changed, 42 insertions(+), 48 deletions(-) diff --git 
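Before Traefik loads the certificate pair, it is worth confirming that the ``.pem`` and ``.key`` files actually belong together; standard OpenSSL checks, with ``${hostname}`` standing for the certificate file names assumed above:

.. code-block:: bash

   cd $CONFIGURATION_PATH/configuration/files
   # Both commands should print the same public-key digest.
   openssl x509 -noout -pubkey -in ${hostname}.pem | sha256sum
   openssl pkey -pubout -in ${hostname}.key | sha256sum
   # Also check the expiry date.
   openssl x509 -noout -enddate -in ${hostname}.pem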
a/.env_sample b/.env_sample index 599d8e0..392ab7d 100644 --- a/.env_sample +++ b/.env_sample @@ -1,41 +1,44 @@ LOCAL_WAR=./dataverse.war -#COMPOSE_FILE=./docker-compose.yml - -# Activate Dataverse language pack by setting language code: -# en - English hu - Hungarian fr - French sl - Slovenian -# se - Swedish es - Spanish it - Italian ua - Ukrainian -# pt - Portuguese ru - Russian at - Austrian German -# br - Brazilian Portuguese ca - French Canadian -#MAINLANG=en +COMPOSE_FILE=distros/dataverse.no/docker-compose.yaml +CONFIGURATION_PATH=/distrib/private +DOCROOT=/distrib +VERSION= 5.11.12.7 +DOCKER_HUB=presacrd4oilmd5ss77y.azurecr.io/dataverseno +SECRETS_DIR="${CONFIGURATION_PATH}/secrets" +#WEBANALYTICSON=true # Dataverse database settings DATAVERSE_DB_HOST=postgres DATAVERSE_DB_USER=dataverse -DATAVERSE_DB_PASSWORD=dvnsecret +DATAVERSE_DB_PASSWORD=psqlpassword DATAVERSE_DB_NAME=dataverse +#SOLR SOLR_SERVICE_HOST=solr:8983 SOLR_SERVICE_PORT=8983 DATAVERSE_URL=localhost:8080 DATAVERSE_SERVICE_HOST=localhost +LOCAL_STORAGE=/mntblob + +# Conter Processor +COUNTERPROSVERSION=0.1.04 +GEOIPLICENSE=Licence +CONFIG_FILE=counter-processor-config.yaml # Postgres settings POSTGRES_USER=dataverse -POSTGRES_PASSWORD=dvnsecret +POSTGRES_PASSWORD=psqlpassword POSTGRES_SERVER=postgres POSTGRES_DATABASE=dataverse POSTGRES_DB=dataverse +POSTGRESTMP=/mnt/tmp/postgres + # Domain configuration and init folder -#hostname=www.yourdataverse.org -hostname=locahost:8080 -#traefikhost=www.yourdataverse.org -traefikhost=localhost:8080 +hostname=dataverse.no +traefikhost=dataverse.no INIT_SCRIPTS_FOLDER=/opt/payara/init.d -# traefik email settings -useremail=youremail@domain.com - # Webhook configuration to bundle external services WEBHOOK=/opt/payara/triggers/external-services.py #CESSDA=True @@ -43,44 +46,35 @@ WEBHOOK=/opt/payara/triggers/external-services.py # DOI parameters # https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring -#doi_authority=doi_authority -#doi_provider=doi_provider -#doi_shoulder=doi_shoulder -#doi_username=doi_username -#doi_password=doi_password -dataciterestapiurlstring=https\\:\/\/api.test.datacite.org +doi_authority=10.21337 +doi_provider=DataCite +doi_username=doiusername +doi_password=doipassword +dataciterestapiurlstring=https\:\/\/api.test.datacite.org baseurlstring=https\:\/\/mds.test.datacite.org # AWS settings # https://guides.dataverse.org/en/latest/installation/config.html#id90 -#aws_bucket_name=aws_bucket_name -#aws_s3_profile=aws_s3_profile -#aws_endpoint_url=aws_endpoint_url +aws_bucket_name=awsbucketname +aws_s3_profile=cloudian +aws_endpoint_url=https\:\/\/s3-oslo.educloud.no + +# AWS UiT +aws_uit_bucket_name=awsbucketname2 +aws_uit_s3_profile=uit +#aws_endpoint_url=https\:\/\/s3-oslo.educloud.no + # Mail relay # https://guides.dataverse.org/en/latest/developers/troubleshooting.html -#system_email=system_email -#mailhost=mailhost -#mailuser=mailuser -#no_reply_email=no_reply_email -#smtp_password=smtp_password -#smtp_port=smtp_port -#socket_port=socket_port +system_email= +mailhost=smtp-relay.exemple.com +mailuser=no-reply@dataverse.no +no_reply_email=no-reply@dataverse.no +smtp_password=smtppassword +smtp_port=465 +socket_port=465 # Federated authentification file # https://guides.dataverse.org/en/latest/installation/shibboleth.html -#federated_json_file=federated_json_file - -# MinIO bucket 1 -# https://guides.dataverse.org/en/latest/installation/config.html#id87 -#bucketname_1=bucketname_1 -#minio_label_1=minio_label_1 
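The S3 profiles referenced above map onto aws-cli profiles (added under ``secrets/aws-cli`` later in this series); the connection can be exercised independently of Dataverse, using the same endpoint, profile and bucket placeholders as the settings above:

.. code-block:: bash

   aws --profile cloudian \
       --endpoint-url https://s3-oslo.educloud.no \
       s3 ls s3://2002-green-dataversenotest1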
-#minio_bucket_1=minio_bucket_1 -#minio_profile_1=minio_profile_1 - -# MinIO bucket 2 -# https://guides.dataverse.org/en/latest/installation/config.html#id87 -#bucketname_2=bucketname_2 -#minio_label_1=minio_label_2 -#minio_bucket_1=minio_bucket_2 -#minio_profile_1=minio_profile_2 +federated_json_file=/secrets/openid.json From 70c51995f8013051fb81cbc80cf9aa52c33a1228 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 31 Aug 2022 11:53:38 +0200 Subject: [PATCH 060/354] Update installation.rst --- doc/installation.rst | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 639eb89..35a2a2b 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -2,7 +2,7 @@ dataverse.no installation ========================= Prerequisites : have sudo rights -install Prerequisites, docker, docker-compose, and git +install Prerequisites, docker, docker-compose, and git, azure-cli .. code-block:: bash @@ -23,7 +23,7 @@ install Prerequisites, docker, docker-compose, and git apt-get install docker-ce docker-ce-cli containerd.io curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose chmod +x /usr/local/bin/docker-compose - apt-get install git + apt-get install -y git azure-cli Dataverse root folder --------------------- @@ -51,6 +51,8 @@ Clone the git git checkout dataverse.no cp -r $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH cp .env_sample .env + az login --identity + az acr login --name presacrd4oilmd5ss77y docker network create traefik The following variables need to be changed in .env @@ -77,7 +79,7 @@ Counter Processor .. code-block:: bash -GEOIPLICENSE=licencekey + GEOIPLICENSE=licencekey Postgres settings @@ -94,6 +96,12 @@ DOI parameters doi_username=username doi_password=password +AWS + +.. 
code-block:: bash + + + Certificates installation ------------------------- From 89b04df4f753301cf73f4b257d18391ae9b18f65 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Thu, 1 Sep 2022 10:24:43 +0200 Subject: [PATCH 061/354] Update installation.rst --- doc/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/installation.rst b/doc/installation.rst index 35a2a2b..181addb 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -20,7 +20,7 @@ install Prerequisites, docker, docker-compose, and git, azure-cli $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null apt-get update - apt-get install docker-ce docker-ce-cli containerd.io + apt-get install -y docker-ce docker-ce-cli containerd.io curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose chmod +x /usr/local/bin/docker-compose apt-get install -y git azure-cli From 32037c723faf6ef887bdaaf0552372bd20c188b8 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Thu, 1 Sep 2022 10:28:23 +0200 Subject: [PATCH 062/354] Update installation.rst --- doc/installation.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 181addb..d357cd2 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -1,5 +1,5 @@ -dataverse.no installation -========================= +Dataverse installation on Microsoft Azure +========================================= Prerequisites : have sudo rights install Prerequisites, docker, docker-compose, and git, azure-cli @@ -59,8 +59,8 @@ The following variables need to be changed in .env .. code-block:: bash - hostname=dataverse.no - traefikhost=dataverse.no + hostname=dataverse.azure.com + traefikhost=dataverse.azure.om main configuration From 0d091a1581855d937942d42c9560599b2d8c3149 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Thu, 1 Sep 2022 10:52:09 +0200 Subject: [PATCH 063/354] Create envFileSetup --- doc/envFileSetup | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 doc/envFileSetup diff --git a/doc/envFileSetup b/doc/envFileSetup new file mode 100644 index 0000000..f082cff --- /dev/null +++ b/doc/envFileSetup @@ -0,0 +1,46 @@ +The following variables need to be changed in .env depending on your instalation + +.. code-block:: bash + + hostname=dataverse.azure.com + traefikhost=dataverse.azure.om + +main configuration + +.. code-block:: bash + + DISTRIB=/distrib + CONFIGURATION_PATH=/distrib/private + +Solr + +.. code-block:: bash + + LOCAL_STORAGE=/mntblob + +Counter Processor + +.. code-block:: bash + + GEOIPLICENSE=licencekey + +Postgres settings + +.. code-block:: bash + + POSTGRES_PASSWORD=password + + +DOI parameters + +.. code-block:: bash + + doi_authority=10.21337 + doi_username=username + doi_password=doiword + +AWS + +.. 
code-block:: bash + + aws_bucket_name=bucketName From f331cc4b38ad5adaa543a27e73446fd85f1840f6 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Thu, 1 Sep 2022 11:00:47 +0200 Subject: [PATCH 064/354] Update installation.rst --- doc/installation.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/installation.rst b/doc/installation.rst index d357cd2..dd6ecaf 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -108,9 +108,13 @@ Certificates installation Request the certificates from the correct authority dataverse.pem order: + local in file $[hostmame].pem + Intermediate in file sectigo-intermediate.pem + Root in file sectigo-intermediate.pem + To make the certificate pem file ``cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem`` From 9aeda22d489ed9796bc0cc343f2e79d3bff1bacd Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Thu, 1 Sep 2022 11:05:02 +0200 Subject: [PATCH 065/354] Update installation.rst --- doc/installation.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index dd6ecaf..05c0b8f 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -107,13 +107,13 @@ Certificates installation Request the certificates from the correct authority -dataverse.pem order: +dataverse.no.pem order: -local in file $[hostmame].pem +local, in file $[hostname].pem -Intermediate in file sectigo-intermediate.pem +Intermediate, in file sectigo-intermediate.pem -Root in file sectigo-intermediate.pem +Root, in file sectigo-intermediate.pem To make the certificate pem file ``cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem`` From 7050fafe0d42472353368cb325853803fc623c4f Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Thu, 1 Sep 2022 11:54:55 +0200 Subject: [PATCH 066/354] Update installation.rst --- doc/installation.rst | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 05c0b8f..63dbb1b 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -121,7 +121,7 @@ To make the certificate pem file ``cat sectigo-ecc-intermediate.pem >> *dataver certificates should be put in ``$CONFIGURATION_PATH/configuration/files`` there are 2 files a .pem file and a .key file -The name of the certificates files should match the name in ``$CONFIGURATION_PATH/configuration/files/certificate.toml`` +The name of the certificates files should match the name in ``$CONFIGURATION_PATH/configuration/files/certificates.toml`` Check the certificates with ``curl -placeholder hostname`` @@ -144,9 +144,11 @@ Set up shibboleth Copy keyen.sh comand - - - +Check that your dataverse instalation is axessible +-------------------------------------------------- +.. 
code-block:: bash +cd $DISTRIB/dataverse-docker/ +docker-compose up -d Cronjob to automatically restart dataverse ------------------------------------------ From cd768f95800d0f86d2ac4cfaf14ccf42f89e9d46 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 2 Sep 2022 07:48:43 +0000 Subject: [PATCH 067/354] Disabled image magic for pdf creation --- distros/dataverse.no/init.d/012-disable-imageMagick.sh | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 distros/dataverse.no/init.d/012-disable-imageMagick.sh diff --git a/distros/dataverse.no/init.d/012-disable-imageMagick.sh b/distros/dataverse.no/init.d/012-disable-imageMagick.sh new file mode 100644 index 0000000..258b87c --- /dev/null +++ b/distros/dataverse.no/init.d/012-disable-imageMagick.sh @@ -0,0 +1,3 @@ +#!/bin/bash +mv /usr/bin/convert /usr/bin/convert.MOVED + From 3eab5346b80d0487ada2e2ef76562f718c84f863 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 2 Sep 2022 09:08:45 +0000 Subject: [PATCH 068/354] renamed certificates file for consistency --- distros/dataverse.no/docker-compose.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml index bbe6658..2be8d72 100755 --- a/distros/dataverse.no/docker-compose.yaml +++ b/distros/dataverse.no/docker-compose.yaml @@ -67,8 +67,8 @@ services: volumes: - ${CONFIGURATION_PATH}/shibboleth:/etc/shibboleth - ./configs/http-ssl.conf:/etc/httpd/conf.d/ssl.conf - - ${CONFIGURATION_PATH}/configuration/files/${hostname}.pem:/etc/pki/tls/certs/localhost.crt - - ${CONFIGURATION_PATH}/configuration/files/${hostname}.key:/etc/pki/tls/private/localhost.key + - ${CONFIGURATION_PATH}/configuration/files/localhost.pem:/etc/pki/tls/certs/localhost.crt + - ${CONFIGURATION_PATH}/configuration/files/localhost.key:/etc/pki/tls/private/localhost.key hostname: ${hostname} labels: - "traefik.enable=true" From 18dd7aaf8317c278a8e29a83ca67085cb9f0f17c Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 11:58:10 +0200 Subject: [PATCH 069/354] Create shibbotheth.rst --- doc/shibbotheth.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 doc/shibbotheth.rst diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst new file mode 100644 index 0000000..e1eb1d9 --- /dev/null +++ b/doc/shibbotheth.rst @@ -0,0 +1,13 @@ +Shibboleth +========== + +Asuming a working shibboleth configuration in ``/tmp/shibboleth.tar.gz`` + +Copy and extract the files to the proper location + +.. code-block:: bash + + mv /tmp/shibboleth /$DISTRIB/private + cd /$DISTRIB/private + tar -xvf shibboleth.tar.gz . + From c9ef6b3898ee7e7b7e06be4aef1ae4e89b74f7cc Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 12:00:20 +0200 Subject: [PATCH 070/354] Update shibbotheth.rst --- doc/shibbotheth.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst index e1eb1d9..31963bc 100644 --- a/doc/shibbotheth.rst +++ b/doc/shibbotheth.rst @@ -11,3 +11,5 @@ Copy and extract the files to the proper location cd /$DISTRIB/private tar -xvf shibboleth.tar.gz . 
+Change hostname identity in ``shiboleth2.xm`` +you are done (mostely) From 240ffe4212995d61f74603bf267116dcef6cdb70 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 12:08:40 +0200 Subject: [PATCH 071/354] Update shibbotheth.rst --- doc/shibbotheth.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst index 31963bc..b6b9691 100644 --- a/doc/shibbotheth.rst +++ b/doc/shibbotheth.rst @@ -11,5 +11,6 @@ Copy and extract the files to the proper location cd /$DISTRIB/private tar -xvf shibboleth.tar.gz . -Change hostname identity in ``shiboleth2.xm`` +Change hostname ``entityID`` in ``shiboleth2.xm`` +Change SMAL service provider location you are done (mostely) From 8e039a5825f76c25424886edcf7492fcd3682e04 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 12:17:22 +0200 Subject: [PATCH 072/354] Update shibbotheth.rst --- doc/shibbotheth.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst index b6b9691..a190664 100644 --- a/doc/shibbotheth.rst +++ b/doc/shibbotheth.rst @@ -11,6 +11,6 @@ Copy and extract the files to the proper location cd /$DISTRIB/private tar -xvf shibboleth.tar.gz . -Change hostname ``entityID`` in ``shiboleth2.xm`` +Change hostname ``entityID`` in ``shibboleth2.xm`` Change SMAL service provider location you are done (mostely) From 1e59066938c4d9e2838d23719d0db64359d30123 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 12:21:17 +0200 Subject: [PATCH 073/354] Update shibbotheth.rst --- doc/shibbotheth.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst index a190664..4e580bf 100644 --- a/doc/shibbotheth.rst +++ b/doc/shibbotheth.rst @@ -7,8 +7,8 @@ Copy and extract the files to the proper location .. code-block:: bash - mv /tmp/shibboleth /$DISTRIB/private - cd /$DISTRIB/private + mv /tmp/shibboleth $DISTRIB/private + cd $DISTRIB/private tar -xvf shibboleth.tar.gz . Change hostname ``entityID`` in ``shibboleth2.xm`` From 8579f0efdda6c994a47445ffe83b16b06cf7ba73 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 12:22:10 +0200 Subject: [PATCH 074/354] Update shibbotheth.rst --- doc/shibbotheth.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst index 4e580bf..e673043 100644 --- a/doc/shibbotheth.rst +++ b/doc/shibbotheth.rst @@ -7,7 +7,7 @@ Copy and extract the files to the proper location .. code-block:: bash - mv /tmp/shibboleth $DISTRIB/private + mv /tmp/shibboleth* $DISTRIB/private cd $DISTRIB/private tar -xvf shibboleth.tar.gz . From d345225a0f0b7447c3ecb1226e89a9f9ea7c0150 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 12:26:42 +0200 Subject: [PATCH 075/354] Update shibbotheth.rst --- doc/shibbotheth.rst | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst index e673043..02eae40 100644 --- a/doc/shibbotheth.rst +++ b/doc/shibbotheth.rst @@ -7,9 +7,10 @@ Copy and extract the files to the proper location .. code-block:: bash - mv /tmp/shibboleth* $DISTRIB/private - cd $DISTRIB/private - tar -xvf shibboleth.tar.gz . + export DISTRIB=/distrib + mv /tmp/shibboleth* $DISTRIB/private + cd $DISTRIB/private + tar -xvf shibboleth.tar.gz . 
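Once the stack is running, whether the mail settings (or any other setting) took effect can be read back through Dataverse's admin API; ``:SystemEmail`` is the standard database setting behind ``system_email``, and localhost:8080 is the default internal port in this setup:

.. code-block:: bash

   curl -s http://localhost:8080/api/admin/settings/:SystemEmail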
Change hostname ``entityID`` in ``shibboleth2.xm`` Change SMAL service provider location From c67123bf3499155261163a2e9e02a8b86138a5bc Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 12:27:05 +0200 Subject: [PATCH 076/354] Update shibbotheth.rst --- doc/shibbotheth.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst index 02eae40..636c86c 100644 --- a/doc/shibbotheth.rst +++ b/doc/shibbotheth.rst @@ -8,7 +8,7 @@ Copy and extract the files to the proper location .. code-block:: bash export DISTRIB=/distrib - mv /tmp/shibboleth* $DISTRIB/private + cp /tmp/shibboleth* $DISTRIB/private cd $DISTRIB/private tar -xvf shibboleth.tar.gz . From 50698a7bfd5a22318371dc4261db49a0af6ec551 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 12:31:13 +0200 Subject: [PATCH 077/354] Update shibbotheth.rst --- doc/shibbotheth.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst index 636c86c..81a210a 100644 --- a/doc/shibbotheth.rst +++ b/doc/shibbotheth.rst @@ -10,7 +10,7 @@ Copy and extract the files to the proper location export DISTRIB=/distrib cp /tmp/shibboleth* $DISTRIB/private cd $DISTRIB/private - tar -xvf shibboleth.tar.gz . + tar -xvf shibboleth.tar.gz Change hostname ``entityID`` in ``shibboleth2.xm`` Change SMAL service provider location From 43c68a0629656d266c9826840fd0e87ee9cc276a Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 12:32:24 +0200 Subject: [PATCH 078/354] Update shibbotheth.rst --- doc/shibbotheth.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst index 81a210a..5cb173f 100644 --- a/doc/shibbotheth.rst +++ b/doc/shibbotheth.rst @@ -11,7 +11,8 @@ Copy and extract the files to the proper location cp /tmp/shibboleth* $DISTRIB/private cd $DISTRIB/private tar -xvf shibboleth.tar.gz + cd $DISTRIB/private/shibboleth -Change hostname ``entityID`` in ``shibboleth2.xm`` +Change hostname ``entityID`` in ``shibboleth2.xml`` Change SMAL service provider location you are done (mostely) From 21887d6a264d002cd64fb7a955d701cba2c6d5ac Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 2 Sep 2022 12:34:55 +0200 Subject: [PATCH 079/354] Update shibbotheth.rst --- doc/shibbotheth.rst | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst index 5cb173f..3643369 100644 --- a/doc/shibbotheth.rst +++ b/doc/shibbotheth.rst @@ -13,6 +13,8 @@ Copy and extract the files to the proper location tar -xvf shibboleth.tar.gz cd $DISTRIB/private/shibboleth -Change hostname ``entityID`` in ``shibboleth2.xml`` -Change SMAL service provider location +Change domain name ``entityID`` in ``shibboleth2.xml`` to the domain name of the instalation + +Change SMAL service provider location + you are done (mostely) From 19b1a2e3b9447f30cd0c87a45380290580c86dd0 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 12:41:23 +0200 Subject: [PATCH 080/354] functionality validation how to --- doc/functionalityValidation | 40 +++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 doc/functionalityValidation diff --git a/doc/functionalityValidation b/doc/functionalityValidation new file mode 100644 index 0000000..33d6395 --- /dev/null +++ b/doc/functionalityValidation @@ -0,0 +1,40 @@ +mail relay +---------- + in ``.en`` + ~~~~~~~~~~ + +Set ``system_email=`` + + +set the smtp relay + +.. 
code-block:: bash + + mailhost=smtp-relay.exemple.com + mailuser=no-reply@dataverse.no + no_reply_email=no-reply@dataverse.no + smtp_password=password + smtp_port=465 + socket_port=465 + +doi settings +------------ + +feide authentication +-------------------- + +local storage +------------- + +S3 support +---------- + +large files +----------- + + +counter processor +----------------- + +custom settings +-------------- From 8000d96b32dc2a098cae3a42078f978a328487ef Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 12:43:10 +0200 Subject: [PATCH 081/354] Update functionalityValidation --- doc/functionalityValidation | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/functionalityValidation b/doc/functionalityValidation index 33d6395..bea2650 100644 --- a/doc/functionalityValidation +++ b/doc/functionalityValidation @@ -1,3 +1,11 @@ +Default admin login +------------------- + +username : dataverseAdmin + +password : admin + + mail relay ---------- in ``.en`` From 62b680e98bb58f6d2093f3a2d3fd1b905e185a04 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 12:43:25 +0200 Subject: [PATCH 082/354] Rename functionalityValidation to functionalityValidation.rst --- doc/{functionalityValidation => functionalityValidation.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename doc/{functionalityValidation => functionalityValidation.rst} (100%) diff --git a/doc/functionalityValidation b/doc/functionalityValidation.rst similarity index 100% rename from doc/functionalityValidation rename to doc/functionalityValidation.rst From e3d1ef04ab5a773a073eecc036304b564332e3a9 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 12:48:44 +0200 Subject: [PATCH 083/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index bea2650..6c15d92 100644 --- a/doc/functionalityValidation.rst +++ b/doc/functionalityValidation.rst @@ -6,6 +6,15 @@ username : dataverseAdmin password : admin +S3 storage +---------- + +For testting purpuses S3 storage fuctionality can be disabled using : + +``mv /distrib/dataverse-docker/distros/dataverse.no/init.d/0*s3*.sh /tmp/`` + +then restart dataverse + mail relay ---------- in ``.en`` @@ -37,6 +46,11 @@ local storage S3 support ---------- +if S3 storage was disabled re unable it using : + + +``mv /tmp/0*s3*.sh /distrib/dataverse-docker/distros/dataverse.no/init.d/`` + large files ----------- From c4f6d6cf487bbaf882afd0dd63a8114f4424676b Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 12:49:06 +0200 Subject: [PATCH 084/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index 6c15d92..29ec158 100644 --- a/doc/functionalityValidation.rst +++ b/doc/functionalityValidation.rst @@ -17,6 +17,7 @@ then restart dataverse mail relay ---------- + in ``.en`` ~~~~~~~~~~ From ecce12de7241e63c6918617e1eab6205ca28f730 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 13:15:03 +0200 Subject: [PATCH 085/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index 29ec158..fa0bd24 100644 --- a/doc/functionalityValidation.rst +++ b/doc/functionalityValidation.rst @@ -18,11 +18,12 
@@ then restart dataverse mail relay ---------- - in ``.en`` - ~~~~~~~~~~ + in ``.env`` Set ``system_email=`` +for exemple ``stem_email=000xxx@uit.no`` + set the smtp relay @@ -35,6 +36,11 @@ set the smtp relay smtp_port=465 socket_port=465 +in the web interface +¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨ +Change the administrator email at https:///dataverseuser.xhtml?selectTab=accountInfo + + doi settings ------------ From 46b9258225f20a7d9cf372c517992421d19571dd Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 13:15:32 +0200 Subject: [PATCH 086/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index fa0bd24..704c18c 100644 --- a/doc/functionalityValidation.rst +++ b/doc/functionalityValidation.rst @@ -37,7 +37,7 @@ set the smtp relay socket_port=465 in the web interface -¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨ +~~~~~~~~~~~~~~~~~~~~ Change the administrator email at https:///dataverseuser.xhtml?selectTab=accountInfo From d355f8db25b7fc61f89e44e097ed5d68098e18f2 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 13:16:16 +0200 Subject: [PATCH 087/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index 704c18c..339d649 100644 --- a/doc/functionalityValidation.rst +++ b/doc/functionalityValidation.rst @@ -18,7 +18,8 @@ then restart dataverse mail relay ---------- - in ``.env`` +in ``.env`` +~~~~~~~~~~~ Set ``system_email=`` From 9c0ab98ffaad3c471e8939824cb9301cf7e096ad Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 13:17:15 +0200 Subject: [PATCH 088/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index 339d649..7a18be2 100644 --- a/doc/functionalityValidation.rst +++ b/doc/functionalityValidation.rst @@ -9,7 +9,7 @@ password : admin S3 storage ---------- -For testting purpuses S3 storage fuctionality can be disabled using : +For testing purpuses S3 storage fuctionality can be disabled using : ``mv /distrib/dataverse-docker/distros/dataverse.no/init.d/0*s3*.sh /tmp/`` @@ -23,7 +23,7 @@ in ``.env`` Set ``system_email=`` -for exemple ``stem_email=000xxx@uit.no`` +for example ``stem_email=000xxx@uit.no`` set the smtp relay From 709301b49a900cce5a564dda884516cc7d4e90ed Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 13:17:39 +0200 Subject: [PATCH 089/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index 7a18be2..5d946b5 100644 --- a/doc/functionalityValidation.rst +++ b/doc/functionalityValidation.rst @@ -23,7 +23,7 @@ in ``.env`` Set ``system_email=`` -for example ``stem_email=000xxx@uit.no`` +for example ``system_email=000xxx@uit.no`` set the smtp relay From 35253d6a0278e789b00f82259766088ec1937dee Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 13:20:22 +0200 Subject: [PATCH 090/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index 5d946b5..7984dd5 100644 --- a/doc/functionalityValidation.rst +++ 
b/doc/functionalityValidation.rst @@ -40,6 +40,7 @@ set the smtp relay in the web interface ~~~~~~~~~~~~~~~~~~~~ Change the administrator email at https:///dataverseuser.xhtml?selectTab=accountInfo +the "Verify email" button should send en email. doi settings From 999f9e2adf4c4d4dfa74a9d9e8f4f22520375da0 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 13:20:49 +0200 Subject: [PATCH 091/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index 7984dd5..f3931c4 100644 --- a/doc/functionalityValidation.rst +++ b/doc/functionalityValidation.rst @@ -40,8 +40,8 @@ set the smtp relay in the web interface ~~~~~~~~~~~~~~~~~~~~ Change the administrator email at https:///dataverseuser.xhtml?selectTab=accountInfo -the "Verify email" button should send en email. +the "Verify email" button should send en email. doi settings ------------ From 84f164f2a4ad6d6a0b711d3c8c23a0ebb3e55206 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 13:55:13 +0200 Subject: [PATCH 092/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index f3931c4..224c1b3 100644 --- a/doc/functionalityValidation.rst +++ b/doc/functionalityValidation.rst @@ -46,6 +46,20 @@ the "Verify email" button should send en email. doi settings ------------ +in ``.env`` +~~~~~~~~~~~ + +set doi configuration + +.. code-block:: bash + +in ``/secrets`` +~~~~~~~~~~~~~~~~ + +set the password in ``$DISTRIB/private/secrets/doi_asadmin`` +``AS_ADMIN_ALIASPASSWORD=changeme`` + + feide authentication -------------------- From d78c47d9d35632b07108523e91548c96ee68369f Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Mon, 5 Sep 2022 14:00:11 +0200 Subject: [PATCH 093/354] Update functionalityValidation.rst --- doc/functionalityValidation.rst | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst index 224c1b3..3b297aa 100644 --- a/doc/functionalityValidation.rst +++ b/doc/functionalityValidation.rst @@ -57,7 +57,12 @@ in ``/secrets`` ~~~~~~~~~~~~~~~~ set the password in ``$DISTRIB/private/secrets/doi_asadmin`` -``AS_ADMIN_ALIASPASSWORD=changeme`` + +for example with "changeme" as a password ``AS_ADMIN_ALIASPASSWORD=changeme`` + +set the password in ``$DISTRIB/private/secrets/doi/password`` + +for example with "changeme" as a password ``changeme`` feide authentication From f15303540d9c6d2cc622d39e774a08a303e9a7ab Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Wed, 7 Sep 2022 07:40:01 +0000 Subject: [PATCH 094/354] achitecture for the aws bucket keys --- secrets/aws-cli/.aws/config | 13 +++++++++++++ secrets/aws-cli/.aws/credentials | 7 +++++++ secrets/aws-cli/aws-list.sh | 2 ++ 3 files changed, 22 insertions(+) create mode 100755 secrets/aws-cli/.aws/config create mode 100755 secrets/aws-cli/.aws/credentials create mode 100755 secrets/aws-cli/aws-list.sh diff --git a/secrets/aws-cli/.aws/config b/secrets/aws-cli/.aws/config new file mode 100755 index 0000000..5a6050e --- /dev/null +++ b/secrets/aws-cli/.aws/config @@ -0,0 +1,13 @@ +[cloudian] +output = json +region = + +[uit] +output = json +region = + +[default] +output = json +region = + + diff --git a/secrets/aws-cli/.aws/credentials 
b/secrets/aws-cli/.aws/credentials new file mode 100755 index 0000000..c1f1cce --- /dev/null +++ b/secrets/aws-cli/.aws/credentials @@ -0,0 +1,7 @@ +[cloudian] +aws_access_key_id= +aws_secret_access_key= + +[uit] +aws_access_key_id= +aws_secret_access_key= diff --git a/secrets/aws-cli/aws-list.sh b/secrets/aws-cli/aws-list.sh new file mode 100755 index 0000000..3afa73c --- /dev/null +++ b/secrets/aws-cli/aws-list.sh @@ -0,0 +1,2 @@ +#!/bin/bash +aws --endpoint-url https:// s3api list-objects-v2 --bucket Date: Wed, 7 Sep 2022 09:10:59 +0000 Subject: [PATCH 095/354] openid template in secrets --- secrets/openid.json | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 secrets/openid.json diff --git a/secrets/openid.json b/secrets/openid.json new file mode 100644 index 0000000..87449f1 --- /dev/null +++ b/secrets/openid.json @@ -0,0 +1,9 @@ +{ + "id":"feide", + "factoryAlias":"oidc", + "title":"edugain", + "subtitle":"", + "factoryData":"type: oidc | issuer: https://auth.dataporten.no | clientId: | clientSecret:", + "enabled":true +} + From 111dae2fe0ecd807b441ea380987a338ec0d0916 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Thu, 8 Sep 2022 13:46:33 +0200 Subject: [PATCH 096/354] Create prerequisitResourses --- doc/prerequisitResourses | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 doc/prerequisitResourses diff --git a/doc/prerequisitResourses b/doc/prerequisitResourses new file mode 100644 index 0000000..777ae62 --- /dev/null +++ b/doc/prerequisitResourses @@ -0,0 +1,7 @@ +in ``/ressourse`` + +SSL certificate ``/resourse/private/secrets/configuration`` + +Database ``/resourse/private/database-data.dump'' + +Docroot``/resourse/docroot`` From e48dd93cb486d075a37c20f57fd1362c5132ff18 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 09:36:14 +0200 Subject: [PATCH 097/354] Rename prerequisitResourses to prerequisitsResourses.rts --- doc/{prerequisitResourses => prerequisitsResourses.rts} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename doc/{prerequisitResourses => prerequisitsResourses.rts} (100%) diff --git a/doc/prerequisitResourses b/doc/prerequisitsResourses.rts similarity index 100% rename from doc/prerequisitResourses rename to doc/prerequisitsResourses.rts From 3283acd72e3d5d9420cc353cf491e68cf15a54c1 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 09:36:50 +0200 Subject: [PATCH 098/354] Rename prerequisitsResourses.rts to prerequisitsResourses.rst --- doc/{prerequisitsResourses.rts => prerequisitsResourses.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename doc/{prerequisitsResourses.rts => prerequisitsResourses.rst} (100%) diff --git a/doc/prerequisitsResourses.rts b/doc/prerequisitsResourses.rst similarity index 100% rename from doc/prerequisitsResourses.rts rename to doc/prerequisitsResourses.rst From d6e40c6c14c28f6b5255ddb0d2c7b748477daecc Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 09:42:39 +0200 Subject: [PATCH 099/354] Update prerequisitsResourses.rst --- doc/prerequisitsResourses.rst | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/doc/prerequisitsResourses.rst b/doc/prerequisitsResourses.rst index 777ae62..52ce2aa 100644 --- a/doc/prerequisitsResourses.rst +++ b/doc/prerequisitsResourses.rst @@ -1,7 +1,14 @@ + in ``/ressourse`` -SSL certificate ``/resourse/private/secrets/configuration`` +SSL certificate ``/resourse/private/configuration`` + +Database ``/resourse/private/database-data`` -Database ``/resourse/private/database-data.dump'' +Secrets 
``/resourse/private/secrets`` Docroot``/resourse/docroot`` + +.. code-block:: bash + + cp /resourse/* $DISTRIB/ From e6c95431d41d978a1d9047312e19340e29d10ba1 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 09:42:59 +0200 Subject: [PATCH 100/354] Update prerequisitsResourses.rst --- doc/prerequisitsResourses.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/prerequisitsResourses.rst b/doc/prerequisitsResourses.rst index 52ce2aa..5f5a3f4 100644 --- a/doc/prerequisitsResourses.rst +++ b/doc/prerequisitsResourses.rst @@ -7,7 +7,7 @@ Database ``/resourse/private/database-data`` Secrets ``/resourse/private/secrets`` -Docroot``/resourse/docroot`` +Docroot ``/resourse/docroot`` .. code-block:: bash From b8368b7d9a549f56d6e80eb1222731a5f8a3cccf Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 09:47:33 +0200 Subject: [PATCH 101/354] Create testing.rst --- doc/testing.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/testing.rst diff --git a/doc/testing.rst b/doc/testing.rst new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/doc/testing.rst @@ -0,0 +1 @@ + From 51b302b3d67b813e3db6501d9884932bdbf0603d Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 09:56:25 +0200 Subject: [PATCH 102/354] Update installation.rst --- doc/installation.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/installation.rst b/doc/installation.rst index 63dbb1b..09d9476 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -55,6 +55,13 @@ Clone the git az acr login --name presacrd4oilmd5ss77y docker network create traefik +if using pre-made resourses archive +.. code-block:: bash + tar -xvf resourse.tar $DISTRIB/ + cp $DISTRIB/private/secrets/.env $DISTRIB/dataverse-docker/.env + +skip to cronjob instalation step + The following variables need to be changed in .env .. code-block:: bash From 55a997ca9c37df82196a4c487fe157b3b3a05ef4 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 10:26:04 +0200 Subject: [PATCH 103/354] Update installation.rst --- doc/installation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 09d9476..baba9e6 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -57,8 +57,8 @@ Clone the git if using pre-made resourses archive .. code-block:: bash - tar -xvf resourse.tar $DISTRIB/ - cp $DISTRIB/private/secrets/.env $DISTRIB/dataverse-docker/.env + tar -xvzf /tmp/resourse.tar $DISTRIB/ + cp $DISTRIB/.env $DISTRIB/dataverse-docker/.env skip to cronjob instalation step From 358a1b35fd45effa8a351031265249e9869a949b Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 10:29:05 +0200 Subject: [PATCH 104/354] Update installation.rst --- doc/installation.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/installation.rst b/doc/installation.rst index baba9e6..79a952e 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -56,7 +56,9 @@ Clone the git docker network create traefik if using pre-made resourses archive + .. 
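The pre-made archive is assumed, not produced, by this guide; one plausible way to build it from an existing installation, matching the layout listed above (the exact paths are assumptions):

.. code-block:: bash

   cd /distrib
   # Bundle the private configuration (secrets, shibboleth, .env) and docroot.
   tar -czvf /tmp/resourses.tar.gz private docroot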
code-block:: bash + tar -xvzf /tmp/resourse.tar $DISTRIB/ cp $DISTRIB/.env $DISTRIB/dataverse-docker/.env From 9d93431b9ac7ddee90a1e5e68fc903b279663975 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 10:56:24 +0200 Subject: [PATCH 105/354] Update installation.rst --- doc/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/installation.rst b/doc/installation.rst index 79a952e..51d647f 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -60,7 +60,7 @@ if using pre-made resourses archive .. code-block:: bash tar -xvzf /tmp/resourse.tar $DISTRIB/ - cp $DISTRIB/.env $DISTRIB/dataverse-docker/.env + cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/ skip to cronjob instalation step From 8da851bf390fcc67ed4ce03452cb2a29243b949f Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 11:09:09 +0200 Subject: [PATCH 106/354] Update installation.rst --- doc/installation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 51d647f..c13274e 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -147,9 +147,9 @@ Apache and shibboleth configuration ----------------------------------- Apache configuration -Change domain name +Change domain name in -Set up shibboleth +Set up shibboleth ``shibboleth/shibboleth2.xml`` Copy keyen.sh comand From da83e024048e15d23c580b8bde3ee1ec1f16d437 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 11:29:00 +0200 Subject: [PATCH 107/354] Update prerequisitsResourses.rst --- doc/prerequisitsResourses.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/doc/prerequisitsResourses.rst b/doc/prerequisitsResourses.rst index 5f5a3f4..afdfd9b 100644 --- a/doc/prerequisitsResourses.rst +++ b/doc/prerequisitsResourses.rst @@ -7,8 +7,14 @@ Database ``/resourse/private/database-data`` Secrets ``/resourse/private/secrets`` +Shibboleth ``/resourse/private/shibboleth`` + +env ``/resourse/private/.env`` + Docroot ``/resourse/docroot`` + + .. code-block:: bash cp /resourse/* $DISTRIB/ From 5c7e9485914e6106b795a2af5baa3972f0c2c1da Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 11:33:58 +0200 Subject: [PATCH 108/354] Update installation.rst --- doc/installation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index c13274e..201a2e0 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -58,8 +58,8 @@ Clone the git if using pre-made resourses archive .. code-block:: bash - - tar -xvzf /tmp/resourse.tar $DISTRIB/ + cd /$DISTRIB + tar -xvzf /tmp/resourse.tar.gz cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/ skip to cronjob instalation step From a23a0e11c501a1ef5536472ddb4ff6077e24515c Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 11:36:14 +0200 Subject: [PATCH 109/354] Update installation.rst --- doc/installation.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/installation.rst b/doc/installation.rst index 201a2e0..f8dfa99 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -58,6 +58,7 @@ Clone the git if using pre-made resourses archive .. 
code-block:: bash + cd /$DISTRIB tar -xvzf /tmp/resourse.tar.gz cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/ From 6586fb8d145f954f3a3cbc14f5ab4fb962e59b4e Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 11:36:55 +0200 Subject: [PATCH 110/354] Update installation.rst --- doc/installation.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/installation.rst b/doc/installation.rst index f8dfa99..fc9f64f 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -60,7 +60,7 @@ if using pre-made resourses archive .. code-block:: bash cd /$DISTRIB - tar -xvzf /tmp/resourse.tar.gz + tar -xvzf /tmp/resourses.tar.gz cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/ skip to cronjob instalation step From 5fa6b59abc9f456efcd5f31984d4570b7a90beb2 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 11:54:18 +0200 Subject: [PATCH 111/354] Update installation.rst --- doc/installation.rst | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index fc9f64f..5f2a09e 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -63,7 +63,7 @@ if using pre-made resourses archive tar -xvzf /tmp/resourses.tar.gz cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/ -skip to cronjob instalation step +skip to checking step The following variables need to be changed in .env @@ -157,8 +157,9 @@ Copy keyen.sh comand Check that your dataverse instalation is axessible -------------------------------------------------- .. code-block:: bash -cd $DISTRIB/dataverse-docker/ -docker-compose up -d + + cd $DISTRIB/dataverse-docker/ + docker-compose up -d Cronjob to automatically restart dataverse ------------------------------------------ From d2e5cc4dadaec456ec9e39de96d0fd1219b695ce Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 11:57:09 +0200 Subject: [PATCH 112/354] Update installation.rst --- doc/installation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 5f2a09e..639fc81 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -63,7 +63,7 @@ if using pre-made resourses archive tar -xvzf /tmp/resourses.tar.gz cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/ -skip to checking step +go to "Check that your dataverse instalation is accessible" The following variables need to be changed in .env @@ -154,7 +154,7 @@ Set up shibboleth ``shibboleth/shibboleth2.xml`` Copy keyen.sh comand -Check that your dataverse instalation is axessible +Check that your dataverse instalation is accessible -------------------------------------------------- .. code-block:: bash From 4da30ee69f0394a3c1413b3dd57a1782c8c3ebd5 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 9 Sep 2022 11:57:53 +0200 Subject: [PATCH 113/354] Update installation.rst --- doc/installation.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/installation.rst b/doc/installation.rst index 639fc81..ed63191 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -154,8 +154,8 @@ Set up shibboleth ``shibboleth/shibboleth2.xml`` Copy keyen.sh comand -Check that your dataverse instalation is accessible --------------------------------------------------- +Check that your dataverse installation is accessible +---------------------------------------------------- .. 
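Before bringing the stack up you can validate the compose file, and once it is running, ``docker-compose ps`` plus a request against the site give a quick smoke test. A sketch, assuming the ``hostname`` variable from ``.env`` is exported:

.. code-block:: bash

   # Validate the compose file; prints nothing when the syntax is valid
   docker-compose config -q
   # After the "docker-compose up -d" step below: check container state and probe the site
   docker-compose ps
   curl -sI "https://${hostname}/" | head -n 1

..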
code-block:: bash cd $DISTRIB/dataverse-docker/ From a809dee859dcd857a5c02d5533c489dc7e5aef2d Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 16 Sep 2022 10:10:52 +0200 Subject: [PATCH 114/354] Create maintenance.rst --- doc/maintenance.rst | 96 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 96 insertions(+) create mode 100644 doc/maintenance.rst diff --git a/doc/maintenance.rst b/doc/maintenance.rst new file mode 100644 index 0000000..e1f531f --- /dev/null +++ b/doc/maintenance.rst @@ -0,0 +1,96 @@ +Create dump of production database +---------------------------------- + +connect using ssh to production VM + +if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` + +.. code-block:: bash + + Su postgress + pg_dump -U dataverse dataverse > /tmp/dataverse.dump; + +Transmit dump file to appropriate vm using rsync ``rsync -arvzP --rsh=ssh :/tmp/dataverse.dump :/tmp/dataverse.dump `` + +Upload dump of production database +---------------------------------- + +Connect using ssh to new VM + + +.. code-block:: bash + + docker cp /tmp/dataverse.dump postgres:/tmp/ + docker stop dataverse + + +If you are using a dockerized version : ``docker exec -it postgress /bin/sh`` + +.. code-block:: bash + + su postgress + dropdb -U dataverse dataverse; + createdb -U dataverse dataverse; + psql -U dataverse dataverse -f /tmp/dataverse.dump + + +Usefull database alteration +=========================== + +Replace production DOI with test DOI +------------------------------------ + +if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` + +.. code-block:: bash + + Su postgress + psql -U dataverse dataverse + update dvobject set authority='10.21337' where authority like '%10.18710%'; + +Change dataverse admin password +------------------------------- + +if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` + +.. code-block:: bash + + Su postgress + psql -U dataverse dataverse + update builtinuser set encryptedpassword= '' where username like '%dataverseAdmin%'; + + +Change the database passord +--------------------------- + +if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` +this needs to be consistent with the passord in ``secrets/db/password`` and in ``.env`` + +.. code-block:: bash + + Su postgress + ALTER USER DATAVERSE WITH PASSWORD ''; + + + +Change feide login endpoint +--------------------------- + +if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` + +.. 
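Before running the rewrite below it can be worth previewing which accounts it would touch. A dry-run sketch against the same table and column used by the update:

.. code-block:: bash

   # Show the Feide persistent IDs that the regexp_replace below would rewrite
   psql -U dataverse dataverse -c \
      "SELECT persistentuserid FROM authenticateduserlookup WHERE persistentuserid LIKE '%idp.%';"

..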
code-block:: bash + + Su postgress + psql -U dataverse dataverse + sql update authenticateduserlookup set persistentuserid=regexp_replace(persistentuserid, 'idp\.', 'idp-test.'); + + + + + + + + + + + From 43ad8c8fd33774f41368f743d8ba2cb5ed53f7af Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 16 Sep 2022 10:57:32 +0200 Subject: [PATCH 115/354] Update maintenance.rst --- doc/maintenance.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index e1f531f..5a50463 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -85,6 +85,15 @@ if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` sql update authenticateduserlookup set persistentuserid=regexp_replace(persistentuserid, 'idp\.', 'idp-test.'); +Change File storage location +---------------------------- + +NOT READY YET + +update dvobject set storageidentifier='S3://2002-green-dataversenotest1:' where dtype='Dataset'; + +UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.owner_id=107543 and o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + From 47dc1a0570c8f61deb7031bfd6d62a6cd7e2822b Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 20 Sep 2022 12:46:40 +0200 Subject: [PATCH 116/354] Update maintenance.rst --- doc/maintenance.rst | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 5a50463..c169e02 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -7,7 +7,7 @@ if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` .. code-block:: bash - Su postgress + Su postgres pg_dump -U dataverse dataverse > /tmp/dataverse.dump; Transmit dump file to appropriate vm using rsync ``rsync -arvzP --rsh=ssh :/tmp/dataverse.dump :/tmp/dataverse.dump `` @@ -28,11 +28,14 @@ If you are using a dockerized version : ``docker exec -it postgress /bin/sh`` .. code-block:: bash - su postgress + su postgres dropdb -U dataverse dataverse; createdb -U dataverse dataverse; psql -U dataverse dataverse -f /tmp/dataverse.dump + Change password + + Usefull database alteration =========================== @@ -44,7 +47,7 @@ if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` .. code-block:: bash - Su postgress + Su postgres psql -U dataverse dataverse update dvobject set authority='10.21337' where authority like '%10.18710%'; @@ -55,7 +58,7 @@ if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` .. code-block:: bash - Su postgress + Su postgres psql -U dataverse dataverse update builtinuser set encryptedpassword= '' where username like '%dataverseAdmin%'; @@ -68,7 +71,7 @@ this needs to be consistent with the passord in ``secrets/db/password`` and in ` .. code-block:: bash - Su postgress + Su postgres ALTER USER DATAVERSE WITH PASSWORD ''; @@ -80,7 +83,7 @@ if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` .. 
code-block:: bash - Su postgress + Su postgres psql -U dataverse dataverse sql update authenticateduserlookup set persistentuserid=regexp_replace(persistentuserid, 'idp\.', 'idp-test.'); From c7ab38600adb58646cfbfbe931a9f65c8ed065e6 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 20 Sep 2022 13:04:34 +0200 Subject: [PATCH 117/354] Update maintenance.rst --- doc/maintenance.rst | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index c169e02..516265f 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -3,7 +3,7 @@ Create dump of production database connect using ssh to production VM -if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` +if you are using a dockerized version : ``docker exec -it postgres /bin/sh`` .. code-block:: bash @@ -24,7 +24,7 @@ Connect using ssh to new VM docker stop dataverse -If you are using a dockerized version : ``docker exec -it postgress /bin/sh`` +If you are using a dockerized version : ``docker exec -it postgres /bin/sh`` .. code-block:: bash @@ -43,18 +43,18 @@ Usefull database alteration Replace production DOI with test DOI ------------------------------------ -if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` +if you are using a dockerized version : ``docker exec -it postgres /bin/sh`` .. code-block:: bash - Su postgres + su postgres psql -U dataverse dataverse update dvobject set authority='10.21337' where authority like '%10.18710%'; Change dataverse admin password ------------------------------- -if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` +if you are using a dockerized version : ``docker exec -it postgres /bin/sh`` .. code-block:: bash @@ -66,12 +66,12 @@ if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` Change the database passord --------------------------- -if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` +if you are using a dockerized version : ``docker exec -it postgres /bin/sh`` this needs to be consistent with the passord in ``secrets/db/password`` and in ``.env`` .. code-block:: bash - Su postgres + su postgres ALTER USER DATAVERSE WITH PASSWORD ''; @@ -79,11 +79,11 @@ this needs to be consistent with the passord in ``secrets/db/password`` and in ` Change feide login endpoint --------------------------- -if you are using a dockerized version : ``docker exec -it postgress /bin/sh`` +if you are using a dockerized version : ``docker exec -it postgres /bin/sh`` .. code-block:: bash - Su postgres + su postgres psql -U dataverse dataverse sql update authenticateduserlookup set persistentuserid=regexp_replace(persistentuserid, 'idp\.', 'idp-test.'); From 2ee8fca2944af71e0623775fff96c543bb574545 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 20 Sep 2022 13:04:52 +0200 Subject: [PATCH 118/354] Update maintenance.rst --- doc/maintenance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 516265f..ee73a42 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -7,7 +7,7 @@ if you are using a dockerized version : ``docker exec -it postgres /bin/sh`` .. 
code-block:: bash - Su postgres + su postgres pg_dump -U dataverse dataverse > /tmp/dataverse.dump; Transmit dump file to appropriate vm using rsync ``rsync -arvzP --rsh=ssh :/tmp/dataverse.dump :/tmp/dataverse.dump `` From e584db181eed8df5dad3d0f82eb421beb5fcba4e Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 20 Sep 2022 13:50:47 +0200 Subject: [PATCH 119/354] Update maintenance.rst --- doc/maintenance.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index ee73a42..674a600 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -93,6 +93,9 @@ Change File storage location NOT READY YET +File stored in S3 : S3://10.21337/WFD8O0 +File stored in local`` select * from dvobject where identifier like '%XCCW4L%';``: file://10.21337/XCCW4L + update dvobject set storageidentifier='S3://2002-green-dataversenotest1:' where dtype='Dataset'; UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.owner_id=107543 and o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); From 9f0cfd934b7f25466be603c31d56d2e918c81035 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 20 Sep 2022 14:07:10 +0200 Subject: [PATCH 120/354] Update maintenance.rst --- doc/maintenance.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 674a600..bf29ac0 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -103,6 +103,8 @@ UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3:// +UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + From 7bb5298397626170cc52267168b5d4cd36081162 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 20 Sep 2022 14:08:36 +0200 Subject: [PATCH 121/354] Update maintenance.rst --- doc/maintenance.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index bf29ac0..3445280 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -94,7 +94,8 @@ Change File storage location NOT READY YET File stored in S3 : S3://10.21337/WFD8O0 -File stored in local`` select * from dvobject where identifier like '%XCCW4L%';``: file://10.21337/XCCW4L + +File stored in local `` select * from dvobject where identifier like '%XCCW4L%'; `` : file://10.21337/XCCW4L update dvobject set storageidentifier='S3://2002-green-dataversenotest1:' where dtype='Dataset'; From 4298c78e26f458c2a7089077b59fb7a3b7040baf Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 20 Sep 2022 14:10:43 +0200 Subject: [PATCH 122/354] Update maintenance.rst --- doc/maintenance.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 3445280..928e556 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -109,6 +109,6 @@ UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://',' Date: Tue, 20 Sep 2022 14:31:28 +0200 Subject: [PATCH 123/354] Update maintenance.rst --- doc/maintenance.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 928e556..4f2ed36 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -106,6 +106,7 @@ UPDATE 
dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3:// UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); +UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); From 3c4d54679ca54f665badc5dd0abe175dc9e39192 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Tue, 20 Sep 2022 14:59:57 +0200 Subject: [PATCH 124/354] Update maintenance.rst --- doc/maintenance.rst | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 4f2ed36..06a41a0 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -110,6 +110,16 @@ UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3:// -Coppy filest to and from S3 storage +Copy files to and from S3 storage ----------------------------------- + +Get MDF5 for the files uploaded today +------------------------------------- + +select * from dvobject as dv, datafile as df where dv.dtype='DataFile' and modificationtime>='2022-09-20' and dv.id=df.id order by df.id desc limit 10; + + +the mdf is corespmding to the etag in cloudian + + From c00a4ce2737b8ec9a286eff93ec8fe377fbc323c Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 09:11:06 +0200 Subject: [PATCH 125/354] Update maintenance.rst --- doc/maintenance.rst | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 06a41a0..9dfa971 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -95,18 +95,20 @@ NOT READY YET File stored in S3 : S3://10.21337/WFD8O0 -File stored in local `` select * from dvobject where identifier like '%XCCW4L%'; `` : file://10.21337/XCCW4L - -update dvobject set storageidentifier='S3://2002-green-dataversenotest1:' where dtype='Dataset'; - -UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.owner_id=107543 and o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); +File stored in local +`` select * from dvobject where identifier like '%XCCW4L%'; `` : file://10.21337/XCCW4L +.. code-block:: bash + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); +in the folwing exemple = S3 and = 2002-green-dataversenotest1 -UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); +.. 
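One way to gauge the scope first is a broad count (the statements below additionally exclude harvested datasets):

.. code-block:: bash

   # Count DataFile rows that still point at local storage before rewriting them
   psql -U dataverse dataverse -c \
      "SELECT count(*) FROM dvobject WHERE dtype = 'DataFile' AND storageidentifier LIKE 'file://%';"

..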
code-block:: bash -UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.owner_id=107543 and o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); From 31e0ed1787481ec10344e138e5d2fdae34ded33a Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 09:16:24 +0200 Subject: [PATCH 126/354] Update maintenance.rst --- doc/maintenance.rst | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 9dfa971..a702717 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -98,12 +98,19 @@ File stored in S3 : S3://10.21337/WFD8O0 File stored in local `` select * from dvobject where identifier like '%XCCW4L%'; `` : file://10.21337/XCCW4L +the following update statement is to update the files while not affecting the exernal datasets harvested form other locations listed in table 'dataset'. + .. code-block:: bash UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); - UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); -in the folwing exemple = S3 and = 2002-green-dataversenotest1 +the following update statement is to update the datasets while not affecting the exernal datasets harvested form other locations listed in table 'dataset'. + +.. code-block:: bash + + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + +in the following exemple = S3 and = 2002-green-dataversenotest1 .. 
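The rewrite can also be rehearsed inside a transaction and rolled back, so its effect can be inspected without committing anything. An illustrative pattern only, simplified from the statements below (it omits their harvesting check):

.. code-block:: bash

   psql -U dataverse dataverse <<'SQL'
   BEGIN;
   UPDATE dvobject SET storageidentifier = REPLACE(storageidentifier, 'file://', 'S3://2002-green-dataversenotest1:')
       WHERE dtype = 'DataFile' AND storageidentifier LIKE 'file://%';
   SELECT storageidentifier FROM dvobject WHERE dtype = 'DataFile' LIMIT 5;
   ROLLBACK;
   SQL

..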
code-block:: bash From 093cdc76e67a3c663da8809f8372212571468c18 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 09:19:04 +0200 Subject: [PATCH 127/354] Update maintenance.rst --- doc/maintenance.rst | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index a702717..e6b558d 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -88,6 +88,10 @@ if you are using a dockerized version : ``docker exec -it postgres /bin/sh`` sql update authenticateduserlookup set persistentuserid=regexp_replace(persistentuserid, 'idp\.', 'idp-test.'); +Copy files to and from S3 storage +----------------------------------- + + Change File storage location ---------------------------- @@ -98,19 +102,19 @@ File stored in S3 : S3://10.21337/WFD8O0 File stored in local `` select * from dvobject where identifier like '%XCCW4L%'; `` : file://10.21337/XCCW4L -the following update statement is to update the files while not affecting the exernal datasets harvested form other locations listed in table 'dataset'. +the following update statement is to update the files while not affecting the external datasets harvested form other locations listed in table 'dataset'. .. code-block:: bash - UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); -the following update statement is to update the datasets while not affecting the exernal datasets harvested form other locations listed in table 'dataset'. +the following update statement is to update the datasets while not affecting the external datasets harvested form other locations listed in table 'dataset'. .. code-block:: bash - UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); -in the following exemple = S3 and = 2002-green-dataversenotest1 +in the following exemple = S3 and = 2002-green-dataversenotest1 .. 
code-block:: bash @@ -119,9 +123,6 @@ in the following exemple = S3 and = 2002-gr -Copy files to and from S3 storage ------------------------------------ - Get MDF5 for the files uploaded today ------------------------------------- From f5e02a8c8a58c4fa5ce9c6b01c02683936b6cf63 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 09:53:15 +0200 Subject: [PATCH 128/354] Update maintenance.rst --- doc/maintenance.rst | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index e6b558d..51fa91d 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -87,6 +87,23 @@ if you are using a dockerized version : ``docker exec -it postgres /bin/sh`` psql -U dataverse dataverse sql update authenticateduserlookup set persistentuserid=regexp_replace(persistentuserid, 'idp\.', 'idp-test.'); +Setting up an S3 bucket +----------------------- + +Create an S3 bucket using your prefered provider + +The asadmin commands descried in https://guides.dataverse.org/en/latest/installation/config.html#amazon-s3-storage-or-compatible anc be found in distros/dataverse.no/init.d/006-s3-aws-storage.sh One of sucj a file sould be created for every bucket. + +in .env(add link) change the folowing + +.. code-block:: bash + aws_uit_bucket_name=awsbucketname2 + aws_uit_s3_profile=uit + aws_endpoint_url=https\:\/\/s3-oslo.educloud.no + +The credentials should be set in secrets/aws-cli/.aws/credentials + +The region and format should be set in secrets/aws-cli/.aws/config Copy files to and from S3 storage ----------------------------------- From 356016c77ff7edeb3e8101205b8629e05af29869 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 09:53:49 +0200 Subject: [PATCH 129/354] Update maintenance.rst --- doc/maintenance.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 51fa91d..8045e77 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -97,6 +97,7 @@ The asadmin commands descried in https://guides.dataverse.org/en/latest/installa in .env(add link) change the folowing .. code-block:: bash + aws_uit_bucket_name=awsbucketname2 aws_uit_s3_profile=uit aws_endpoint_url=https\:\/\/s3-oslo.educloud.no From dd1e4ba6ea4efe92c2c3d02666864db8fd5cca87 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 10:01:42 +0200 Subject: [PATCH 130/354] Update maintenance.rst --- doc/maintenance.rst | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 8045e77..742a8ce 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -98,13 +98,26 @@ in .env(add link) change the folowing .. code-block:: bash - aws_uit_bucket_name=awsbucketname2 - aws_uit_s3_profile=uit - aws_endpoint_url=https\:\/\/s3-oslo.educloud.no + aws_uit_bucket_name= + aws_uit_s3_profile= + aws_endpoint_url= for exemple https\:\/\/s3-oslo.educloud.no + +the can be for exemple 'https\:\/\/s3-oslo.educloud.no' , special caracters neds to be escaped. If ussing an aws provided bucket the endpoint is not nessesary and the region should be set instead in secrets/aws-cli/.aws/config + +The region and format should be set in secrets/aws-cli/.aws/config if using a custom endpoint should be set to a non existant region. + +.. code-block:: bash + [] + output = json + region = The credentials should be set in secrets/aws-cli/.aws/credentials -The region and format should be set in secrets/aws-cli/.aws/config +.. 
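A suggested precaution for that file, not part of the original steps (paths assume the distribution checkout as the working directory):

.. code-block:: bash

   # Restrict the credentials file and keep the secrets folder out of git
   chmod 600 secrets/aws-cli/.aws/credentials
   grep -qx 'secrets/' .gitignore || echo 'secrets/' >> .gitignore

..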
code-block:: bash + + [] + aws_access_key_id= + aws_secret_access_key= Copy files to and from S3 storage ----------------------------------- From 7970d5c101a7ffc8716a98bd40c0cb1e22f78e1c Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 10:06:34 +0200 Subject: [PATCH 131/354] Update maintenance.rst --- doc/maintenance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 742a8ce..4c8657b 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -92,7 +92,7 @@ Setting up an S3 bucket Create an S3 bucket using your prefered provider -The asadmin commands descried in https://guides.dataverse.org/en/latest/installation/config.html#amazon-s3-storage-or-compatible anc be found in distros/dataverse.no/init.d/006-s3-aws-storage.sh One of sucj a file sould be created for every bucket. +The asadmin commands descried in https://guides.dataverse.org/en/latest/installation/config.html#amazon-s3-storage-or-compatible and be found in distros/dataverse.no/init.d/006-s3-aws-storage.sh(linked) Create one file for every new bucket. in .env(add link) change the folowing From e850a54de258b019dc20a62f131c2be647b6c99b Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 10:08:06 +0200 Subject: [PATCH 132/354] Update maintenance.rst --- doc/maintenance.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 4c8657b..11662db 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -100,9 +100,9 @@ in .env(add link) change the folowing aws_uit_bucket_name= aws_uit_s3_profile= - aws_endpoint_url= for exemple https\:\/\/s3-oslo.educloud.no + aws_endpoint_url= -the can be for exemple 'https\:\/\/s3-oslo.educloud.no' , special caracters neds to be escaped. If ussing an aws provided bucket the endpoint is not nessesary and the region should be set instead in secrets/aws-cli/.aws/config +the can be for exemple `` https\:\/\/s3-oslo.educloud.no `` , special caracters neds to be escaped. If ussing an aws provided bucket the endpoint is not nessesary and the region should be set instead in secrets/aws-cli/.aws/config The region and format should be set in secrets/aws-cli/.aws/config if using a custom endpoint should be set to a non existant region. From 2e8c5449981025f156621db154ad68186e0bc573 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 10:09:21 +0200 Subject: [PATCH 133/354] Update maintenance.rst --- doc/maintenance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 11662db..eb5b25d 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -102,7 +102,7 @@ in .env(add link) change the folowing aws_uit_s3_profile= aws_endpoint_url= -the can be for exemple `` https\:\/\/s3-oslo.educloud.no `` , special caracters neds to be escaped. If ussing an aws provided bucket the endpoint is not nessesary and the region should be set instead in secrets/aws-cli/.aws/config +The can ``testcode`` be for exemple ``https\:\/\/s3-oslo.educloud.no`` , special caracters neds to be escaped. If ussing an aws provided bucket the endpoint is not nessesary and the region should be set instead in secrets/aws-cli/.aws/config The region and format should be set in secrets/aws-cli/.aws/config if using a custom endpoint should be set to a non existant region. 
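With the profile, endpoint and credentials in place, the same settings can be exercised from the command line before restarting Dataverse. A sketch reusing the example values from above; substitute your own profile, endpoint and bucket:

.. code-block:: bash

   # Point the AWS CLI at the same config and credentials Dataverse will use
   export AWS_CONFIG_FILE=secrets/aws-cli/.aws/config
   export AWS_SHARED_CREDENTIALS_FILE=secrets/aws-cli/.aws/credentials
   # Listing the bucket confirms the profile, endpoint and keys all line up
   aws --profile uit --endpoint-url https://s3-oslo.educloud.no s3 ls s3://awsbucketname2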
From 3026d9a7061249a21fac1587fe779ce992154f5d Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 10:10:20 +0200 Subject: [PATCH 134/354] Update maintenance.rst --- doc/maintenance.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index eb5b25d..e0b652d 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -102,7 +102,7 @@ in .env(add link) change the folowing aws_uit_s3_profile= aws_endpoint_url= -The can ``testcode`` be for exemple ``https\:\/\/s3-oslo.educloud.no`` , special caracters neds to be escaped. If ussing an aws provided bucket the endpoint is not nessesary and the region should be set instead in secrets/aws-cli/.aws/config +The can be for exemple ``https\:\/\/s3-oslo.educloud.no`` , specials caracters need to be escaped. If ussing an aws provided bucket the endpoint is not nessesary and the region should be set instead in secrets/aws-cli/.aws/config The region and format should be set in secrets/aws-cli/.aws/config if using a custom endpoint should be set to a non existant region. @@ -131,7 +131,7 @@ NOT READY YET File stored in S3 : S3://10.21337/WFD8O0 File stored in local -`` select * from dvobject where identifier like '%XCCW4L%'; `` : file://10.21337/XCCW4L +``select * from dvobject where identifier like '%XCCW4L%';`` : file://10.21337/XCCW4L the following update statement is to update the files while not affecting the external datasets harvested form other locations listed in table 'dataset'. From 9603aa7f70ea843777831b56b2ca2b0d0a5b21d1 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 11:13:43 +0200 Subject: [PATCH 135/354] Create customization --- doc/customization | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 doc/customization diff --git a/doc/customization b/doc/customization new file mode 100644 index 0000000..3c9726a --- /dev/null +++ b/doc/customization @@ -0,0 +1,14 @@ +login page +---------- + + +Header +------ + +footer +------ + +analitics matomo +---------------- + +init.d/100-analytics.sh From 1914c581c564362cfc7a3068b26d69c3d7924704 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 11:14:03 +0200 Subject: [PATCH 136/354] Rename customization to customization.rst --- doc/{customization => customization.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename doc/{customization => customization.rst} (100%) diff --git a/doc/customization b/doc/customization.rst similarity index 100% rename from doc/customization rename to doc/customization.rst From b0f963e2a56c3c279d8f92b210d9211c92bb809a Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Wed, 21 Sep 2022 11:22:53 +0200 Subject: [PATCH 137/354] Update customization.rst --- doc/customization.rst | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/doc/customization.rst b/doc/customization.rst index 3c9726a..c7e4764 100644 --- a/doc/customization.rst +++ b/doc/customization.rst @@ -1,14 +1,24 @@ login page ---------- +init.d/204-custumisation.sh Header ------ +init.d/204-custumisation.sh + footer ------ +init.d/204-custumisation.sh + analitics matomo ---------------- init.d/100-analytics.sh + +dataverse support form changed to email +--------------------------------------- + +init.d/201-bundle.sh From 53ab84c207514e14d918741c2f8dff93d73aed6f Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 23 Sep 2022 10:17:39 +0200 Subject: [PATCH 138/354] Update maintenance.rst --- doc/maintenance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/doc/maintenance.rst b/doc/maintenance.rst index e0b652d..d45e091 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -92,7 +92,7 @@ Setting up an S3 bucket Create an S3 bucket using your prefered provider -The asadmin commands descried in https://guides.dataverse.org/en/latest/installation/config.html#amazon-s3-storage-or-compatible and be found in distros/dataverse.no/init.d/006-s3-aws-storage.sh(linked) Create one file for every new bucket. +The asadmin commands descried in https://guides.dataverse.org/en/latest/installation/config.html#amazon-s3-storage-or-compatible and be found in `distros/dataverse.no/init.d/006-s3-aws-storage.sh `_ Create one file for every new bucket. in .env(add link) change the folowing From 4537e04c6519aa18ba31854915b71cf43b37c6e3 Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 23 Sep 2022 10:22:36 +0200 Subject: [PATCH 139/354] Update maintenance.rst --- doc/maintenance.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index d45e091..7527452 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -102,16 +102,17 @@ in .env(add link) change the folowing aws_uit_s3_profile= aws_endpoint_url= -The can be for exemple ``https\:\/\/s3-oslo.educloud.no`` , specials caracters need to be escaped. If ussing an aws provided bucket the endpoint is not nessesary and the region should be set instead in secrets/aws-cli/.aws/config +The can be for exemple ``https\:\/\/s3-oslo.educloud.no`` , specials caracters need to be escaped. If ussing an aws provided bucket the endpoint is not nessesary and the region should be set instead in `secrets/aws-cli/.aws/config`_ -The region and format should be set in secrets/aws-cli/.aws/config if using a custom endpoint should be set to a non existant region. +The region and format should be set in `secrets/aws-cli/.aws/config`_ if using a custom endpoint should be set to a non existant region. +.._secrets/aws-cli/.aws/config: https://github.com/DataverseNO/dataverse-docker/blob/dataverse.no/secrets/aws-cli/.aws/config/ .. code-block:: bash [] output = json region = -The credentials should be set in secrets/aws-cli/.aws/credentials +The credentials should be set in `secrets/aws-cli/.aws/credentials `_ .. code-block:: bash @@ -126,7 +127,6 @@ Copy files to and from S3 storage Change File storage location ---------------------------- -NOT READY YET File stored in S3 : S3://10.21337/WFD8O0 From 9f85c58c00d4d283a21bf71856dec49f1d65464c Mon Sep 17 00:00:00 2001 From: Louis-wr Date: Fri, 23 Sep 2022 10:23:24 +0200 Subject: [PATCH 140/354] Update maintenance.rst --- doc/maintenance.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/maintenance.rst b/doc/maintenance.rst index 7527452..fc6feec 100644 --- a/doc/maintenance.rst +++ b/doc/maintenance.rst @@ -106,7 +106,7 @@ The can be for exemple ``https\:\/\/s3-oslo.educloud.no`` , speci The region and format should be set in `secrets/aws-cli/.aws/config`_ if using a custom endpoint should be set to a non existant region. -.._secrets/aws-cli/.aws/config: https://github.com/DataverseNO/dataverse-docker/blob/dataverse.no/secrets/aws-cli/.aws/config/ +.. _secrets/aws-cli/.aws/config: https://github.com/DataverseNO/dataverse-docker/blob/dataverse.no/secrets/aws-cli/.aws/config/ .. 
code-block:: bash [] output = json From 793de2cfb25ad2d3a667fd6a4d4cafaf12bb2076 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 23 Sep 2022 10:12:46 +0000 Subject: [PATCH 141/354] added automated Sql exectution per https://github.com/DataverseNO/local.dataverse.no/issues/24 --- distros/dataverse.no/init.d/0001-cleanup.sh | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 distros/dataverse.no/init.d/0001-cleanup.sh diff --git a/distros/dataverse.no/init.d/0001-cleanup.sh b/distros/dataverse.no/init.d/0001-cleanup.sh new file mode 100644 index 0000000..39e64b1 --- /dev/null +++ b/distros/dataverse.no/init.d/0001-cleanup.sh @@ -0,0 +1,7 @@ +#!/bin/bash/ +if [ -f /tmp/cleanup.sql ] +then + export PGPASSWORD=`cat /secrets/db/password` + psql -U dataverse dataverse -h postgres -f /tmp/cleanup.sql + mv /tmp/cleanup.sql /tmp/cleanup.sql-used +fi From 1dc686033d3a09ef5e87015e7431dfc38b5cad32 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Thu, 20 Oct 2022 13:06:00 +0000 Subject: [PATCH 142/354] added 204 customisation --- distros/dataverse.no/init.d/204-custumisation.sh | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 distros/dataverse.no/init.d/204-custumisation.sh diff --git a/distros/dataverse.no/init.d/204-custumisation.sh b/distros/dataverse.no/init.d/204-custumisation.sh new file mode 100644 index 0000000..970ed04 --- /dev/null +++ b/distros/dataverse.no/init.d/204-custumisation.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/loginpage.xhtml -O /tmp/loginpage.xhtml +wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/dataverse_header.xhtml -O /tmp/dataverse_header.xhtml +wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/dataverse_footer.xhtml -O /tmp/dataverse_footer.xhtml + + +cp /tmp/loginpage.xhtml /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/loginpage.xhtml +cp /tmp/dataverse_header.xhtml /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/dataverse_header.xhtml +cp /tmp/dataverse_footer.xhtml /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/dataverse_footer.xhtml + From 54ce692e290c5ef9434a3e0b6a27a06415bd6d95 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 21 Oct 2022 09:03:25 +0000 Subject: [PATCH 143/354] added azure login and dvwebloader --- .gitignore | 1 + distros/dataverse.no/docker-compose.yaml | 7 ++++-- .../dataverse.no/init.d/055-dvwebloader.sh | 25 +++++++++++++++++++ .../dataverse.no/init.d/081-azure-login.sh | 7 ++++++ start.sh | 5 ++++ 5 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 distros/dataverse.no/init.d/055-dvwebloader.sh create mode 100755 distros/dataverse.no/init.d/081-azure-login.sh create mode 100755 start.sh diff --git a/.gitignore b/.gitignore index 09c4d5d..ade617f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ dataverse.war .env +.env.2* .gitignore #Ignoring IDE files diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml index 2be8d72..4760c4f 100755 --- a/distros/dataverse.no/docker-compose.yaml +++ b/distros/dataverse.no/docker-compose.yaml @@ -47,9 +47,9 @@ services: - "POSTGRES_PASSWORD" - "POSTGRES_PORT" volumes: - - 
${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down + # - ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down - ${POSTGRESTMP}/:/mnttmp/ - #- /extdisk/database-data-prod:/var/lib/postgresql/data/ + - /extdisk/database-data-prod:/var/lib/postgresql/data/ #- /extdisk/database-data-demo:/var/lib/postgresql/data/ @@ -202,6 +202,7 @@ services: - "aws_endpoint_url" - "aws_uit_bucket_name" - "aws_uit_s3_profile" + - "azure_json_file" - "system_email" - "mailhost" - "mailuser" @@ -242,6 +243,7 @@ services: - "SOLR_LOCATION=solr:8983" - "INIT_SCRIPTS_FOLDER" - "hostname" + - "PASSWORD_FILE" - "POSTGRES_SERVER" - "POSTGRES_PORT" - "POSTGRES_DATABASE" @@ -258,6 +260,7 @@ services: - "COUNTERPROSVERSION" - "GEOIPLICENSE" - "CONFIG_FILE" + - "PAYARA_ARGS=--debug" depends_on: - postgres - solr diff --git a/distros/dataverse.no/init.d/055-dvwebloader.sh b/distros/dataverse.no/init.d/055-dvwebloader.sh new file mode 100644 index 0000000..82932d2 --- /dev/null +++ b/distros/dataverse.no/init.d/055-dvwebloader.sh @@ -0,0 +1,25 @@ +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName": "Dataverse WebLoader", + "description": "Upload all the files in a local directory!", + "toolName": "dvwebloader", + "scope": "dataset", + "contentType":"text/plain", + "types": [ + "explore" + ], + "toolUrl": "https://gdcc.github.io/dvwebloader/src/dvwebloader.html", + "toolParameters": { + "queryParameters": [ + { + "siteUrl": "{siteUrl}" + }, + { + "datasetPid": "{datasetPid}" + }, + { + "key": "{apiToken}" + } + ] + } +}' diff --git a/distros/dataverse.no/init.d/081-azure-login.sh b/distros/dataverse.no/init.d/081-azure-login.sh new file mode 100755 index 0000000..bfef879 --- /dev/null +++ b/distros/dataverse.no/init.d/081-azure-login.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +# Federated login activation +# https://guides.dataverse.org/en/latest/installation/shibboleth.html +if [ "${azure_json_file}" ]; then + curl -X POST -H 'Content-type: application/json' --upload-file ${azure_json_file} http://localhost:8080/api/admin/authenticationProviders +fi diff --git a/start.sh b/start.sh new file mode 100755 index 0000000..5de0b4d --- /dev/null +++ b/start.sh @@ -0,0 +1,5 @@ +#!/bin/bash +docker-compose up -d +sleep 60 +docker exec dataverse asadmin --user=admin --passwordfile=/opt/payara/passwordFile deploy /opt/payara/deployments/dataverse.war + From 27d188be4d367b23ad65df8049cf09a9c1f54121 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 21 Oct 2022 09:19:15 +0000 Subject: [PATCH 144/354] fixed postgres --- distros/dataverse.no/docker-compose.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml index 4760c4f..88fadd5 100755 --- a/distros/dataverse.no/docker-compose.yaml +++ b/distros/dataverse.no/docker-compose.yaml @@ -47,9 +47,9 @@ services: - "POSTGRES_PASSWORD" - "POSTGRES_PORT" volumes: - # - ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down + - ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down - ${POSTGRESTMP}/:/mnttmp/ - - /extdisk/database-data-prod:/var/lib/postgresql/data/ + #- /extdisk/database-data-prod:/var/lib/postgresql/data/ #- /extdisk/database-data-demo:/var/lib/postgresql/data/ From 
3b1904da4aaf319973401dda10895418d8359303 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 21 Oct 2022 11:19:44 +0000 Subject: [PATCH 145/354] added dataverse.siteUrl jvm option --- distros/dataverse.no/init.d/04-setdomain.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/distros/dataverse.no/init.d/04-setdomain.sh b/distros/dataverse.no/init.d/04-setdomain.sh index 065e83a..aa1e168 100755 --- a/distros/dataverse.no/init.d/04-setdomain.sh +++ b/distros/dataverse.no/init.d/04-setdomain.sh @@ -8,3 +8,5 @@ siteURLcmd=dataverse.siteUrl='https\:\/\/'${hostname} echo $siteURLcmd asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $siteURLcmd asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $hostnamecmd +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options -$siteURLcmd + From 05b51c8ae8e60b964aeeb8f94d59587dacc24af6 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 11 Nov 2022 09:51:03 +0000 Subject: [PATCH 146/354] counter prossesor and dvwebloader --- distros/dataverse.no/init.d/0001-cleanup.sh | 6 +-- .../dataverse.no/init.d/055-dvwebloader.sh | 42 ++++++++++++------- .../init.d/203-counterprocessor.sh | 6 +-- .../init.d/cronjob/makedatacount.sh | 3 +- 4 files changed, 34 insertions(+), 23 deletions(-) diff --git a/distros/dataverse.no/init.d/0001-cleanup.sh b/distros/dataverse.no/init.d/0001-cleanup.sh index 39e64b1..d5b2f09 100644 --- a/distros/dataverse.no/init.d/0001-cleanup.sh +++ b/distros/dataverse.no/init.d/0001-cleanup.sh @@ -1,7 +1,7 @@ #!/bin/bash/ -if [ -f /tmp/cleanup.sql ] +if [ -f /mnt/tmp/cleanup.sql ] then export PGPASSWORD=`cat /secrets/db/password` - psql -U dataverse dataverse -h postgres -f /tmp/cleanup.sql - mv /tmp/cleanup.sql /tmp/cleanup.sql-used + psql -U dataverse dataverse -h postgres -f /mnt/tmp/cleanup.sql + mv /mnt/tmp/cleanup.sql /mnt/tmp/cleanup.sql-used fi diff --git a/distros/dataverse.no/init.d/055-dvwebloader.sh b/distros/dataverse.no/init.d/055-dvwebloader.sh index 82932d2..bf2f57f 100644 --- a/distros/dataverse.no/init.d/055-dvwebloader.sh +++ b/distros/dataverse.no/init.d/055-dvwebloader.sh @@ -1,25 +1,37 @@ +#!/bin/bash + +export PGPASSWORD=`cat /secrets/db/password` +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/webloader_check.sql -o /tmp/output +EXIST=`grep Dataverse /tmp/output` + +wget https://github.com/DataverseNO/dvwebloader/archive/refs/heads/main.zip -O /tmp/dvwebloader.zip +unzip -o /tmp/dvwebloader.zip -d $DOCROOT_DIR/logos + +if [[ -z $EXIST ]]; then +echo "Loaded" curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ -'{ - "displayName": "Dataverse WebLoader", - "description": "Upload all the files in a local directory!", - "toolName": "dvwebloader", - "scope": "dataset", - "contentType":"text/plain", - "types": [ - "explore" +"{ + \"displayName\": \"Dataverse WebLoader\", + \"description\": \"Upload all the files in a local directory!\", + \"toolName\": \"dvwebloader\", + \"scope\": \"dataset\", + \"contentType\":\"text/plain\", + \"types\": [ + \"explore\" ], - "toolUrl": "https://gdcc.github.io/dvwebloader/src/dvwebloader.html", - "toolParameters": { - "queryParameters": [ + \"toolUrl\": \"https://${hostname}/logos/dvwebloader-main/src/dvwebloader.html\", + \"toolParameters\": { + \"queryParameters\": [ { - "siteUrl": "{siteUrl}" + \"siteUrl\": \"{siteUrl}\" }, { - 
"datasetPid": "{datasetPid}" + \"datasetPid\": \"{datasetPid}\" }, { - "key": "{apiToken}" + \"key\": \"{apiToken}\" } ] } -}' +}" +fi diff --git a/distros/dataverse.no/init.d/203-counterprocessor.sh b/distros/dataverse.no/init.d/203-counterprocessor.sh index 099dbca..4de989f 100644 --- a/distros/dataverse.no/init.d/203-counterprocessor.sh +++ b/distros/dataverse.no/init.d/203-counterprocessor.sh @@ -13,11 +13,9 @@ curl "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Countr wget https://guides.dataverse.org/en/latest/_downloads/a65ffc2dba9f406858591558ae92790c/setup-counter-processor.sh -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/setup-counter-processor.sh wget https://guides.dataverse.org/en/latest/_downloads/fb16fe67897ad9fb85ec67bce5e6b83e/counter-processor-config.yaml -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/counter-processor-config.yaml -curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs/mdc' http://localhost:8080/api/admin/settings/:MDCLogPath +curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs/makeDataCount' http://localhost:8080/api/admin/settings/:MDCLogPath curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics + pip3 install -r requirements.txt --ignore-installed PyYAML export ALLOWED_ENV=year_month -curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath -curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics - diff --git a/distros/dataverse.no/init.d/cronjob/makedatacount.sh b/distros/dataverse.no/init.d/cronjob/makedatacount.sh index ebf6c18..50f5158 100644 --- a/distros/dataverse.no/init.d/cronjob/makedatacount.sh +++ b/distros/dataverse.no/init.d/cronjob/makedatacount.sh @@ -1,4 +1,5 @@ #!/bin/bash -export YEAR_MONTH=$(date '+%Y-%m') +#export YEAR_MONTH=$(date '+%Y-%m') +export YEAR_MONTH=2022-09 cd /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION} python3.8 main.py From 157fa26617a7207c39186fff77467b27359cedb9 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Fri, 18 Nov 2022 10:31:31 +0000 Subject: [PATCH 147/354] fixed dual strating issues --- .../init.d/affiliations/webloader_check.sql | 1 + distros/dataverse.no/init.d/preboot.sh | 12 ++++++ .../dataverse.no/runOnce/055-dvwebloader.sh | 37 +++++++++++++++++++ 3 files changed, 50 insertions(+) create mode 100644 distros/dataverse.no/init.d/affiliations/webloader_check.sql create mode 100644 distros/dataverse.no/init.d/preboot.sh create mode 100644 distros/dataverse.no/runOnce/055-dvwebloader.sh diff --git a/distros/dataverse.no/init.d/affiliations/webloader_check.sql b/distros/dataverse.no/init.d/affiliations/webloader_check.sql new file mode 100644 index 0000000..5c5ef0a --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/webloader_check.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Dataverse WebLoader'; diff --git a/distros/dataverse.no/init.d/preboot.sh b/distros/dataverse.no/init.d/preboot.sh new file mode 100644 index 0000000..f6879d9 --- /dev/null +++ b/distros/dataverse.no/init.d/preboot.sh @@ -0,0 +1,12 @@ +#/bin/bash /opt/payara/init.d/006-s3-aws-storage.sh +#cp -R /secrets/aws-cli/.aws /root/.aws +echo > ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.files.S3.type=s3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties 
dataverse.files.S3.label=S3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.files.S3.bucket-name=${aws_bucket_name}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.files.S3.download-redirect=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.files.S3.upload-redirect=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.files.S3.url-expiration-minutes=120" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.files.S3.connection-pool-size=4096" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.files.storage-driver-id=S3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.files.S3.profile=${aws_s3_profile}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara diff --git a/distros/dataverse.no/runOnce/055-dvwebloader.sh b/distros/dataverse.no/runOnce/055-dvwebloader.sh new file mode 100644 index 0000000..bf2f57f --- /dev/null +++ b/distros/dataverse.no/runOnce/055-dvwebloader.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +export PGPASSWORD=`cat /secrets/db/password` +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/webloader_check.sql -o /tmp/output +EXIST=`grep Dataverse /tmp/output` + +wget https://github.com/DataverseNO/dvwebloader/archive/refs/heads/main.zip -O /tmp/dvwebloader.zip +unzip -o /tmp/dvwebloader.zip -d $DOCROOT_DIR/logos + +if [[ -z $EXIST ]]; then +echo "Loaded" +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +"{ + \"displayName\": \"Dataverse WebLoader\", + \"description\": \"Upload all the files in a local directory!\", + \"toolName\": \"dvwebloader\", + \"scope\": \"dataset\", + \"contentType\":\"text/plain\", + \"types\": [ + \"explore\" + ], + \"toolUrl\": \"https://${hostname}/logos/dvwebloader-main/src/dvwebloader.html\", + \"toolParameters\": { + \"queryParameters\": [ + { + \"siteUrl\": \"{siteUrl}\" + }, + { + \"datasetPid\": \"{datasetPid}\" + }, + { + \"key\": \"{apiToken}\" + } + ] + } +}" +fi From db78c918587545ddd4935fff85774a0438bd7c5a Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 22 Nov 2022 13:46:48 +0000 Subject: [PATCH 148/354] check if previewers exit --- distros/dataverse.no/init.d/07-previewers.sh | 149 ++++++++++++++---- .../init.d/affiliations/play_audio.sql | 1 + .../init.d/affiliations/play_audio_mp3.sql | 1 + .../init.d/affiliations/play_audio_mpeg.sql | 1 + .../init.d/affiliations/play_audio_ogg.sql | 1 + .../init.d/affiliations/play_audio_wav.sql | 1 + .../init.d/affiliations/play_video.sql | 1 + .../init.d/affiliations/play_video_mp4.sql | 1 + .../init.d/affiliations/play_video_ogg.sql | 1 + .../affiliations/play_video_quicktime.sql | 1 + .../init.d/affiliations/read_document.sql | 1 + .../init.d/affiliations/read_text.sql | 1 + .../init.d/affiliations/read_text_plain.sql | 1 + .../init.d/affiliations/view_annotation.sql | 1 + .../init.d/affiliations/view_csv.sql | 1 + .../init.d/affiliations/view_csv_tabular.sql | 1 + .../init.d/affiliations/view_data.sql | 1 + .../init.d/affiliations/view_html.sql | 1 + .../init.d/affiliations/view_html_type.sql | 1 + .../init.d/affiliations/view_hypothesis.sql | 1 + .../init.d/affiliations/view_image.sql | 1 + .../init.d/affiliations/view_image_gif.sql | 1 + .../init.d/affiliations/view_image_jpeg.sql | 1 + .../init.d/affiliations/view_image_png.sql | 1 + 
.../init.d/affiliations/view_pdf.sql | 1 + .../init.d/affiliations/view_r.sql | 1 + .../init.d/affiliations/view_r_file.sql | 1 + .../init.d/affiliations/view_stata.sql | 1 + .../init.d/affiliations/view_tsv.sql | 1 + 29 files changed, 143 insertions(+), 34 deletions(-) create mode 100644 distros/dataverse.no/init.d/affiliations/play_audio.sql create mode 100644 distros/dataverse.no/init.d/affiliations/play_audio_mp3.sql create mode 100644 distros/dataverse.no/init.d/affiliations/play_audio_mpeg.sql create mode 100644 distros/dataverse.no/init.d/affiliations/play_audio_ogg.sql create mode 100644 distros/dataverse.no/init.d/affiliations/play_audio_wav.sql create mode 100644 distros/dataverse.no/init.d/affiliations/play_video.sql create mode 100644 distros/dataverse.no/init.d/affiliations/play_video_mp4.sql create mode 100644 distros/dataverse.no/init.d/affiliations/play_video_ogg.sql create mode 100644 distros/dataverse.no/init.d/affiliations/play_video_quicktime.sql create mode 100644 distros/dataverse.no/init.d/affiliations/read_document.sql create mode 100644 distros/dataverse.no/init.d/affiliations/read_text.sql create mode 100644 distros/dataverse.no/init.d/affiliations/read_text_plain.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_annotation.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_csv.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_csv_tabular.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_data.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_html.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_html_type.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_hypothesis.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_image.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_image_gif.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_image_jpeg.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_image_png.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_pdf.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_r.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_r_file.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_stata.sql create mode 100644 distros/dataverse.no/init.d/affiliations/view_tsv.sql diff --git a/distros/dataverse.no/init.d/07-previewers.sh b/distros/dataverse.no/init.d/07-previewers.sh index d71bb86..fc1a26d 100755 --- a/distros/dataverse.no/init.d/07-previewers.sh +++ b/distros/dataverse.no/init.d/07-previewers.sh @@ -1,38 +1,119 @@ #!/bin/bash - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Text\", \"description\":\"Read the text file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/plain\" }" +export PGPASSWORD=`cat /secrets/db/password` +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/read_text_plain.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' 
/tmp/toolexist.status` + +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Text\", \"description\":\"Read the text file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/plain\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_html_type.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Html\", \"description\":\"View the html file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/HtmlPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/html\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_mp3.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mp3\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_mpeg.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mpeg\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_wav.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", 
\"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/wav\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_ogg.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/ogg\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_gif.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/gif\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_jpeg.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/jpeg\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_png.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, 
{\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/png\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_pdf.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Document\", \"description\":\"Read a pdf document.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/PDFPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/pdf\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_mp4.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/mp4\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_ogg.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/ogg\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_quicktime.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/quicktime\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_csv.sql -o /tmp/toolexist.status +EXIST=`grep 
'0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/csv\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_csv_tabular.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/comma-separated-values\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_tsv.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/tab-separated-values\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_stata.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Stata File\", \"description\":\"View the Stata file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-stata-syntax\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_r_file.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View R file\", 
\"description\":\"View the R file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"type/x-r-syntax\" }" +fi + +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_hypothesis.sql -o /tmp/toolexist.status +EXIST=`grep '0 rows' /tmp/toolexist.status` +if [[ -n $EXIST ]]; then + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Annotations\", \"description\":\"View the annotation entries in a file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/HypothesisPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-json-hypothesis\" }" +fi - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Html\", \"description\":\"View the html file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/HtmlPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/html\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mp3\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mpeg\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", 
\"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/wav\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/ogg\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/gif\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/jpeg\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/png\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Document\", \"description\":\"Read a pdf document.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/PDFPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, 
{\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/pdf\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/mp4\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/ogg\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/quicktime\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/comma-separated-values\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/tab-separated-values\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Stata File\", \"description\":\"View the Stata file as text.\", 
\"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-stata-syntax\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View R file\", \"description\":\"View the R file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"type/x-r-syntax\" }" - - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Annotations\", \"description\":\"View the annotation entries in a file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://globaldataversecommunityconsortium.github.io/dataverse-previewers/previewers/HypothesisPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-json-hypothesis\" }" diff --git a/distros/dataverse.no/init.d/affiliations/play_audio.sql b/distros/dataverse.no/init.d/affiliations/play_audio.sql new file mode 100644 index 0000000..f368f52 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_audio.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Audio'; diff --git a/distros/dataverse.no/init.d/affiliations/play_audio_mp3.sql b/distros/dataverse.no/init.d/affiliations/play_audio_mp3.sql new file mode 100644 index 0000000..2750674 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_audio_mp3.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Audio' and contenttype='audio/mp3'; diff --git a/distros/dataverse.no/init.d/affiliations/play_audio_mpeg.sql b/distros/dataverse.no/init.d/affiliations/play_audio_mpeg.sql new file mode 100644 index 0000000..66f6fc8 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_audio_mpeg.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Audio' and contenttype='audio/mpeg'; diff --git a/distros/dataverse.no/init.d/affiliations/play_audio_ogg.sql b/distros/dataverse.no/init.d/affiliations/play_audio_ogg.sql new file mode 100644 index 0000000..3a7d4ed --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_audio_ogg.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Audio' and contenttype='audio/ogg'; diff --git a/distros/dataverse.no/init.d/affiliations/play_audio_wav.sql b/distros/dataverse.no/init.d/affiliations/play_audio_wav.sql new file mode 100644 index 0000000..0e02df2 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_audio_wav.sql @@ -0,0 +1 @@ +select 
displayname from externaltool where displayname='Play Audio' and contenttype='audio/wav'; diff --git a/distros/dataverse.no/init.d/affiliations/play_video.sql b/distros/dataverse.no/init.d/affiliations/play_video.sql new file mode 100644 index 0000000..61eb4c0 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_video.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Video'; diff --git a/distros/dataverse.no/init.d/affiliations/play_video_mp4.sql b/distros/dataverse.no/init.d/affiliations/play_video_mp4.sql new file mode 100644 index 0000000..b14d4c6 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_video_mp4.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Video' and contenttype='video/mp4'; diff --git a/distros/dataverse.no/init.d/affiliations/play_video_ogg.sql b/distros/dataverse.no/init.d/affiliations/play_video_ogg.sql new file mode 100644 index 0000000..1c997af --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_video_ogg.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Video' and contenttype='video/ogg'; diff --git a/distros/dataverse.no/init.d/affiliations/play_video_quicktime.sql b/distros/dataverse.no/init.d/affiliations/play_video_quicktime.sql new file mode 100644 index 0000000..6e99d94 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_video_quicktime.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Video' and contenttype='video/quicktime'; diff --git a/distros/dataverse.no/init.d/affiliations/read_document.sql b/distros/dataverse.no/init.d/affiliations/read_document.sql new file mode 100644 index 0000000..d54d3f6 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/read_document.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Read Document'; diff --git a/distros/dataverse.no/init.d/affiliations/read_text.sql b/distros/dataverse.no/init.d/affiliations/read_text.sql new file mode 100644 index 0000000..782e491 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/read_text.sql @@ -0,0 +1 @@ +select * from externaltool where displayname='Read Text' and contenttype='text/plain'; diff --git a/distros/dataverse.no/init.d/affiliations/read_text_plain.sql b/distros/dataverse.no/init.d/affiliations/read_text_plain.sql new file mode 100644 index 0000000..782e491 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/read_text_plain.sql @@ -0,0 +1 @@ +select * from externaltool where displayname='Read Text' and contenttype='text/plain'; diff --git a/distros/dataverse.no/init.d/affiliations/view_annotation.sql b/distros/dataverse.no/init.d/affiliations/view_annotation.sql new file mode 100644 index 0000000..97d252e --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_annotation.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Annotations'; diff --git a/distros/dataverse.no/init.d/affiliations/view_csv.sql b/distros/dataverse.no/init.d/affiliations/view_csv.sql new file mode 100644 index 0000000..2cae4e5 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_csv.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Data' and contenttype='text/csv'; diff --git a/distros/dataverse.no/init.d/affiliations/view_csv_tabular.sql b/distros/dataverse.no/init.d/affiliations/view_csv_tabular.sql new file mode 100644 index 0000000..ca83677 --- /dev/null +++ 
b/distros/dataverse.no/init.d/affiliations/view_csv_tabular.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Data' and contenttype='text/comma-separated-values'; diff --git a/distros/dataverse.no/init.d/affiliations/view_data.sql b/distros/dataverse.no/init.d/affiliations/view_data.sql new file mode 100644 index 0000000..8dc243a --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_data.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Data'; diff --git a/distros/dataverse.no/init.d/affiliations/view_html.sql b/distros/dataverse.no/init.d/affiliations/view_html.sql new file mode 100644 index 0000000..beedf68 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_html.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Html'; diff --git a/distros/dataverse.no/init.d/affiliations/view_html_type.sql b/distros/dataverse.no/init.d/affiliations/view_html_type.sql new file mode 100644 index 0000000..288af2d --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_html_type.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Html' and contentType='text/html'; diff --git a/distros/dataverse.no/init.d/affiliations/view_hypothesis.sql b/distros/dataverse.no/init.d/affiliations/view_hypothesis.sql new file mode 100644 index 0000000..1113a63 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_hypothesis.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Annotations' and contenttype='application/x-json-hypothesis'; diff --git a/distros/dataverse.no/init.d/affiliations/view_image.sql b/distros/dataverse.no/init.d/affiliations/view_image.sql new file mode 100644 index 0000000..9a6d158 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_image.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Image'; diff --git a/distros/dataverse.no/init.d/affiliations/view_image_gif.sql b/distros/dataverse.no/init.d/affiliations/view_image_gif.sql new file mode 100644 index 0000000..6ed69fb --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_image_gif.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Image' and contenttype='image/gif'; diff --git a/distros/dataverse.no/init.d/affiliations/view_image_jpeg.sql b/distros/dataverse.no/init.d/affiliations/view_image_jpeg.sql new file mode 100644 index 0000000..a139467 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_image_jpeg.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Image' and contenttype='image/jpeg'; diff --git a/distros/dataverse.no/init.d/affiliations/view_image_png.sql b/distros/dataverse.no/init.d/affiliations/view_image_png.sql new file mode 100644 index 0000000..f2202a5 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_image_png.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Image' and contenttype='image/png'; diff --git a/distros/dataverse.no/init.d/affiliations/view_pdf.sql b/distros/dataverse.no/init.d/affiliations/view_pdf.sql new file mode 100644 index 0000000..fb4dd1f --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_pdf.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Read Document' and contenttype='application/pdf'; diff --git a/distros/dataverse.no/init.d/affiliations/view_r.sql b/distros/dataverse.no/init.d/affiliations/view_r.sql new file 
mode 100644 index 0000000..fb4dd1f --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_r.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View R file'; diff --git a/distros/dataverse.no/init.d/affiliations/view_r_file.sql b/distros/dataverse.no/init.d/affiliations/view_r_file.sql new file mode 100644 index 0000000..2e45cb0 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_r_file.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View R file' and contenttype='type/x-r-syntax'; diff --git a/distros/dataverse.no/init.d/affiliations/view_stata.sql b/distros/dataverse.no/init.d/affiliations/view_stata.sql new file mode 100644 index 0000000..fb4dd1f --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_stata.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Stata File' and contenttype='application/x-stata-syntax'; diff --git a/distros/dataverse.no/init.d/affiliations/view_tsv.sql b/distros/dataverse.no/init.d/affiliations/view_tsv.sql new file mode 100644 index 0000000..6e5a064 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_tsv.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Data' and contenttype='text/tab-separated-values'; From 8fab412599a839a8ffc0597237a3b6a088074ad4 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Tue, 22 Nov 2022 13:53:42 +0000 Subject: [PATCH 149/354] previewer update --- distros/dataverse.no/init.d/07-previewers.sh | 38 ++++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/distros/dataverse.no/init.d/07-previewers.sh b/distros/dataverse.no/init.d/07-previewers.sh index fc1a26d..0ab9177 100755 --- a/distros/dataverse.no/init.d/07-previewers.sh +++ b/distros/dataverse.no/init.d/07-previewers.sh @@ -5,115 +5,115 @@ psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/ EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Text\", \"description\":\"Read the text file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/plain\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Text\", \"description\":\"Read the text file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/plain\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_html_type.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json'
http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Html\", \"description\":\"View the html file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/HtmlPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/html\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Html\", \"description\":\"View the html file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/HtmlPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/html\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_mp3.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mp3\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mp3\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_mpeg.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mpeg\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play 
Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mpeg\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_wav.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/wav\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/wav\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_ogg.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/ogg\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/ogg\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_gif.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - 
curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/gif\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/gif\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_jpeg.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/jpeg\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/jpeg\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_png.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/png\" }" + curl -X POST -H 'Content-type: application/json' 
http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/png\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_pdf.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Document\", \"description\":\"Read a pdf document.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/PDFPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/pdf\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Document\", \"description\":\"Read a pdf document.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/PDFPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/pdf\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_mp4.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/mp4\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/mp4\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_ogg.sql -o /tmp/toolexist.status 
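# Every previewer registration above and below follows the same guarded
# pattern: run a one-line SQL existence check against the externaltool
# table, then POST the previewer JSON only when psql reports '0 rows',
# so re-running this init script does not create duplicate tools.
# A minimal sketch of that pattern as a reusable helper -- the function
# name register_previewer and its calling convention are illustrative
# assumptions, not part of this patch:
register_previewer() {
  local check_sql="$1" payload="$2"
  # psql -o writes the query result to a status file; '(0 rows)' in that
  # output means the tool is not registered yet
  psql -U dataverse dataverse -h postgres -f "$check_sql" -o /tmp/toolexist.status
  if grep -q '0 rows' /tmp/toolexist.status; then
    curl -X POST -H 'Content-type: application/json' \
      http://localhost:8080/api/admin/externalTools -d "$payload"
  fi
}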
 EXIST=`grep '0 rows' /tmp/toolexist.status`
 if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/ogg\" }"
+ curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/ogg\" }"
 fi
 psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_quicktime.sql -o /tmp/toolexist.status
 EXIST=`grep '0 rows' /tmp/toolexist.status`
 if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/quicktime\" }"
+ curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/quicktime\" }"
 fi
 psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_csv.sql -o /tmp/toolexist.status
 EXIST=`grep '0 rows' /tmp/toolexist.status`
 if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/csv\" }"
+ curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/csv\" }"
 fi
 psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_csv_tabular.sql -o /tmp/toolexist.status
 EXIST=`grep '0 rows' /tmp/toolexist.status`
 if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/comma-separated-values\" }"
+ curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/comma-separated-values\" }"
 fi
 psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_tsv.sql -o /tmp/toolexist.status
 EXIST=`grep '0 rows' /tmp/toolexist.status`
 if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/tab-separated-values\" }"
+ curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/tab-separated-values\" }"
 fi
 psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_stata.sql -o /tmp/toolexist.status
 EXIST=`grep '0 rows' /tmp/toolexist.status`
 if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Stata File\", \"description\":\"View the Stata file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-stata-syntax\" }"
+ curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Stata File\", \"description\":\"View the Stata file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-stata-syntax\" }"
 fi
 psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_r_file.sql -o /tmp/toolexist.status
 EXIST=`grep '0 rows' /tmp/toolexist.status`
 if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View R file\", \"description\":\"View the R file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"type/x-r-syntax\" }"
+ curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View R file\", \"description\":\"View the R file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"type/x-r-syntax\" }"
 fi
 psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_hypothesis.sql -o /tmp/toolexist.status
 EXIST=`grep '0 rows' /tmp/toolexist.status`
 if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Annotations\", \"description\":\"View the annotation entries in a file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.2/HypothesisPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-json-hypothesis\" }"
+ curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Annotations\", \"description\":\"View the annotation entries in a file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/HypothesisPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-json-hypothesis\" }"
 fi
\"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/HtmlPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/html\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Html\", \"description\":\"View the html file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/HtmlPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/html\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_mp3.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mp3\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mp3\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_mpeg.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mpeg\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", 
\"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mpeg\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_wav.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/wav\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/wav\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_ogg.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/ogg\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/ogg\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_gif.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View 
Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/gif\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/gif\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_jpeg.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/jpeg\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/jpeg\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_png.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/png\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", 
\"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/png\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_pdf.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Document\", \"description\":\"Read a pdf document.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/PDFPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/pdf\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Document\", \"description\":\"Read a pdf document.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/PDFPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/pdf\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_mp4.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/mp4\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/mp4\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_ogg.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' 
http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/ogg\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/ogg\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_quicktime.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/quicktime\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/quicktime\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_csv.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/csv\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ 
\"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/csv\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_csv_tabular.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/comma-separated-values\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/comma-separated-values\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_tsv.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/tab-separated-values\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/tab-separated-values\" }" fi psql -U dataverse dataverse -h postgres -f 
${INIT_SCRIPTS_FOLDER}/affiliations/view_stata.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Stata File\", \"description\":\"View the Stata file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-stata-syntax\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Stata File\", \"description\":\"View the Stata file as text.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-stata-syntax\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_r_file.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View R file\", \"description\":\"View the R file as text.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"type/x-r-syntax\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View R file\", \"description\":\"View the R file as text.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"type/x-r-syntax\" }" fi psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_hypothesis.sql -o /tmp/toolexist.status EXIST=`grep '0 rows' /tmp/toolexist.status` if [[ -n $EXIST ]]; then - curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Annotations\", \"description\":\"View the annotation entries in a file.\", \"scope\":\"file\", \"type\":\"explore\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/HypothesisPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, 
{\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-json-hypothesis\" }" + curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Annotations\", \"description\":\"View the annotation entries in a file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/HypothesisPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-json-hypothesis\" }" fi From c6786b4b60941a21f806597c46955c0b7713fd4f Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Wed, 30 Nov 2022 11:10:22 +0000 Subject: [PATCH 151/354] Added preboot comand to save logs in server.log --- .env_sample | 4 ++++ distros/dataverse.no/init.d/{preboot.sh => 0000-preboot.sh} | 4 ++++ 2 files changed, 8 insertions(+) rename distros/dataverse.no/init.d/{preboot.sh => 0000-preboot.sh} (80%) diff --git a/.env_sample b/.env_sample index 392ab7d..7a55cee 100644 --- a/.env_sample +++ b/.env_sample @@ -12,6 +12,8 @@ DATAVERSE_DB_HOST=postgres DATAVERSE_DB_USER=dataverse DATAVERSE_DB_PASSWORD=psqlpassword DATAVERSE_DB_NAME=dataverse +PASSWORD_FILE=/secrets/asadminpwd + #SOLR SOLR_SERVICE_HOST=solr:8983 @@ -78,3 +80,5 @@ socket_port=465 # Federated authentification file # https://guides.dataverse.org/en/latest/installation/shibboleth.html federated_json_file=/secrets/openid.json +azure_json_file=/secrets/azopenid.json + diff --git a/distros/dataverse.no/init.d/preboot.sh b/distros/dataverse.no/init.d/0000-preboot.sh similarity index 80% rename from distros/dataverse.no/init.d/preboot.sh rename to distros/dataverse.no/init.d/0000-preboot.sh index f6879d9..09016a0 100644 --- a/distros/dataverse.no/init.d/preboot.sh +++ b/distros/dataverse.no/init.d/0000-preboot.sh @@ -10,3 +10,7 @@ echo "create-system-properties dataverse.files.S3.url-expiration-minutes=120" >> echo "create-system-properties dataverse.files.S3.connection-pool-size=4096" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara echo "create-system-properties dataverse.files.storage-driver-id=S3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara echo "create-system-properties dataverse.files.S3.profile=${aws_s3_profile}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logStandardStreams=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logtoFile=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara + + From 6e0e37207469975f88e0e3820081e9e684a9e615 Mon Sep 17 00:00:00 2001 From: Louis-wr <85620187+Louis-wr@users.noreply.github.com> Date: Wed, 30 Nov 2022 12:32:00 +0000 Subject: [PATCH 152/354] updated docker-compose.yaml --- distros/dataverse.no/docker-compose.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml index 88fadd5..e95d9d4 100755 --- a/distros/dataverse.no/docker-compose.yaml +++ b/distros/dataverse.no/docker-compose.yaml @@ -65,6 +65,8 @@ services: - "8089:80" - "443:9443" volumes: + - ${LOGS_PATH}/shibboleth/httpd:/var/log/httpd + - 
From c6786b4b60941a21f806597c46955c0b7713fd4f Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 30 Nov 2022 10:14:01 +0000
Subject: [PATCH 151/354] Added preboot command to save logs in server.log
---
 .env_sample | 4 ++++
 distros/dataverse.no/init.d/{preboot.sh => 0000-preboot.sh} | 4 ++++
 2 files changed, 8 insertions(+)
 rename distros/dataverse.no/init.d/{preboot.sh => 0000-preboot.sh} (80%)

diff --git a/.env_sample b/.env_sample
index 392ab7d..7a55cee 100644
--- a/.env_sample
+++ b/.env_sample
@@ -12,6 +12,8 @@ DATAVERSE_DB_HOST=postgres
 DATAVERSE_DB_USER=dataverse
 DATAVERSE_DB_PASSWORD=psqlpassword
 DATAVERSE_DB_NAME=dataverse
+PASSWORD_FILE=/secrets/asadminpwd
+
 #SOLR
 SOLR_SERVICE_HOST=solr:8983
@@ -78,3 +80,5 @@ socket_port=465
 # Federated authentification file
 # https://guides.dataverse.org/en/latest/installation/shibboleth.html
 federated_json_file=/secrets/openid.json
+azure_json_file=/secrets/azopenid.json
+

diff --git a/distros/dataverse.no/init.d/preboot.sh b/distros/dataverse.no/init.d/0000-preboot.sh
similarity index 80%
rename from distros/dataverse.no/init.d/preboot.sh
rename to distros/dataverse.no/init.d/0000-preboot.sh
index f6879d9..09016a0 100644
--- a/distros/dataverse.no/init.d/preboot.sh
+++ b/distros/dataverse.no/init.d/0000-preboot.sh
@@ -10,3 +10,7 @@ echo "create-system-properties dataverse.files.S3.url-expiration-minutes=120" >>
 echo "create-system-properties dataverse.files.S3.connection-pool-size=4096" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
 echo "create-system-properties dataverse.files.storage-driver-id=S3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
 echo "create-system-properties dataverse.files.S3.profile=${aws_s3_profile}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logStandardStreams=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logtoFile=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+
+

From 6e0e37207469975f88e0e3820081e9e684a9e615 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 30 Nov 2022 12:32:00 +0000
Subject: [PATCH 152/354] updated docker-compose.yaml
---
 distros/dataverse.no/docker-compose.yaml | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 88fadd5..e95d9d4 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -65,6 +65,8 @@ services:
 - "8089:80"
 - "443:9443"
 volumes:
+ - ${LOGS_PATH}/shibboleth/httpd:/var/log/httpd
+ - ${LOGS_PATH}/shibboleth/shibboleth:/var/log/shibboleth
 - ${CONFIGURATION_PATH}/shibboleth:/etc/shibboleth
 - ./configs/http-ssl.conf:/etc/httpd/conf.d/ssl.conf
 - ${CONFIGURATION_PATH}/configuration/files/localhost.pem:/etc/pki/tls/certs/localhost.crt
@@ -265,11 +267,14 @@ services:
 - postgres
 - solr
 volumes:
+ - ${LOGS_PATH}/dataverse:/opt/payara/appserver/glassfish/domains/domain1/logs/
+ - ${LOGS_PATH}/makeDataCount:/opt/payara/appserver/glassfish/domains/domain1/logs/makeDataCount
 - ${CONFIGURATION_PATH}/secrets:/secrets
 - ${LOCAL_STORAGE}/data:/data
 - ${DOCROOT}/docroot:/opt/payara/docroot
 - ./configs/domain.xml:/opt/payara/domain.xml
 - ./init.d:/opt/payara/init.d
+ - /distrib/private/secrets/init_2_conf_payara.sh:/opt/payara/scripts/init_2_conf_payara.sh
 - /mnt:/mnt
 labels:
 - "traefik.enable=true"
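With logStandardStreams and logtoFile enabled, Payara mirrors stdout/stderr into server.log, and the new ${LOGS_PATH} bind mounts keep those logs on the host. A quick sketch for confirming the attributes after boot, reusing the ADMIN_USER and PASSWORD_FILE conventions of the other init scripts (the tail path assumes the compose mapping above and is run on the host):

    asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} list-log-attributes | grep GFFileHandler.log
    tail -f ${LOGS_PATH}/dataverse/server.log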
From aa11b7862de83a4d189bbbee2b2dd6611ce821bd Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 6 Dec 2022 14:06:05 +0000
Subject: [PATCH 153/354] updated S3 bucket connection
---
 distros/dataverse.no/docker-compose.yaml | 5 +++--
 distros/dataverse.no/init.d/0000-preboot.sh | 11 +++++++----
 distros/dataverse.no/init.d/006-s3-aws-storage.sh | 1 -
 distros/dataverse.no/init.d/007-s3-aws-storage.sh | 1 -
 4 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index e95d9d4..98716bc 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -49,8 +49,8 @@ services:
 volumes:
 - ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down
 - ${POSTGRESTMP}/:/mnttmp/
- #- /extdisk/database-data-prod:/var/lib/postgresql/data/
- #- /extdisk/database-data-demo:/var/lib/postgresql/data/
+ #- /extdisk/database-data-prod:/var/lib/postgresql/data/
+ #- /extdisk/database-data-demo:/var/lib/postgresql/data/
@@ -263,6 +263,7 @@ services:
 - "GEOIPLICENSE"
 - "CONFIG_FILE"
 - "PAYARA_ARGS=--debug"
+ - "aws_config"
 depends_on:
 - postgres
 - solr

diff --git a/distros/dataverse.no/init.d/0000-preboot.sh b/distros/dataverse.no/init.d/0000-preboot.sh
index 09016a0..997bcfc 100644
--- a/distros/dataverse.no/init.d/0000-preboot.sh
+++ b/distros/dataverse.no/init.d/0000-preboot.sh
@@ -1,5 +1,4 @@
-#/bin/bash /opt/payara/init.d/006-s3-aws-storage.sh
-#cp -R /secrets/aws-cli/.aws /root/.aws
+#/bin/bash
 echo > ${INIT_SCRIPTS_FOLDER}/preboot.payara
 echo "create-system-properties dataverse.files.S3.type=s3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
 echo "create-system-properties dataverse.files.S3.label=S3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
@@ -12,5 +11,9 @@ echo "create-system-properties dataverse.files.storage-driver-id=S3" >> ${INIT_S
 echo "create-system-properties dataverse.files.S3.profile=${aws_s3_profile}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
 echo "set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logStandardStreams=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
 echo "set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logtoFile=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
-
-
+keyid=$(grep 'access key' $aws_config | awk -F ': ' {'print $2'};)
+secret_key=$(grep 'secret' $aws_config | awk -F ': ' {'print $2'};)
+endpoint=$aws_endpoint_url
+echo "create-system-properties dataverse.files.S3.access-key="$keyid >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.secret-key="$secret_key >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.custom-endpoint-url=$endpoint" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara

diff --git a/distros/dataverse.no/init.d/006-s3-aws-storage.sh b/distros/dataverse.no/init.d/006-s3-aws-storage.sh
index 02e7e6b..474c4dd 100755
--- a/distros/dataverse.no/init.d/006-s3-aws-storage.sh
+++ b/distros/dataverse.no/init.d/006-s3-aws-storage.sh
@@ -3,7 +3,6 @@
 # AWS Bucket for Dataverse
 # https://guides.dataverse.org/en/latest/installation/config.html#id90
 if [ "${aws_bucket_name}" ]; then
- cp -R /secrets/aws-cli/.aws /root/.aws
 asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.type\=s3"
 asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.label\=S3"
 asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.bucket-name\=${aws_bucket_name}"

diff --git a/distros/dataverse.no/init.d/007-s3-aws-storage.sh b/distros/dataverse.no/init.d/007-s3-aws-storage.sh
index 3b0eba0..fb1efa4 100755
--- a/distros/dataverse.no/init.d/007-s3-aws-storage.sh
+++ b/distros/dataverse.no/init.d/007-s3-aws-storage.sh
@@ -3,7 +3,6 @@
 # AWS Bucket for Dataverse
 # https://guides.dataverse.org/en/latest/installation/config.html#id90
 if [ "${aws_uit_bucket_name}" ]; then
- cp -R /secrets/aws-cli/.aws /root/.aws
 asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.type\=s3"
 asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.label\=S3uit"
 asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.bucket-name\=${aws_uit_bucket_name}"
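The keyid/secret_key parsing above assumes $aws_config points at a plain-text file whose lines split on ': ', with one 'access key' line and one 'secret' line. A sketch of the expected layout and a way to test the extraction outside the preboot run (path and values are placeholders):

    cat /secrets/aws_config
    # access key: AKIAIOSFODNN7EXAMPLE
    # secret: wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
    grep 'access key' /secrets/aws_config | awk -F ': ' '{print $2}'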
From 1b96cf2fce541fd0002f598097404f91f34e0a86 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 7 Dec 2022 11:17:39 +0000
Subject: [PATCH 154/354] Fixed group user trigger
---
 distros/dataverse.no/init.d/202-trigger.sh | 1 +
 .../init.d/affiliations/groupuser_trigger.sql | 28 +++++++++++++++++++
 2 files changed, 29 insertions(+)
 create mode 100644 distros/dataverse.no/init.d/affiliations/groupuser_trigger.sql

diff --git a/distros/dataverse.no/init.d/202-trigger.sh b/distros/dataverse.no/init.d/202-trigger.sh
index 0d371ea..5414825 100755
--- a/distros/dataverse.no/init.d/202-trigger.sh
+++ b/distros/dataverse.no/init.d/202-trigger.sh
@@ -3,4 +3,5 @@ python3 ${INIT_SCRIPTS_FOLDER}/affiliations/affiliation2data.py > /tmp/affiliati
 export PGPASSWORD=`cat /secrets/db/password`
 psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/builtinuser_trigger.sql
 psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/builtinuser_trigger.sql.2
+psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/groupuser_trigger.sql
 psql -U dataverse dataverse -h postgres -f /tmp/affiliations.sql

diff --git a/distros/dataverse.no/init.d/affiliations/groupuser_trigger.sql b/distros/dataverse.no/init.d/affiliations/groupuser_trigger.sql
new file mode 100644
index 0000000..9383859
--- /dev/null
+++ b/distros/dataverse.no/init.d/affiliations/groupuser_trigger.sql
@@ -0,0 +1,28 @@
+DROP TRIGGER IF EXISTS group_trigger on authenticateduser;
+DROP FUNCTION IF EXISTS public.groupmonitor() CASCADE;
+
+CREATE FUNCTION public.groupmonitor() RETURNS trigger
+    LANGUAGE plpgsql
+    AS $$
+
+BEGIN insert into explicitgroup_authenticateduser
+    select e.id, a.id
+    from explicitgroup as e, authenticateduser as a
+
+
+    where emailconfirmed is not null AND ( ( split_part(a.email,'@', 2) = e.displayname) or
+
+    ( split_part(split_part(a.email,'@', 2) , '.',2 ) ||'.'|| split_part(split_part(a.email,'@', 2) , '.',3) = e.displayname)
+    or
+
+    ( split_part(split_part(a.email,'@', 2) , '.',3 ) ||'.'|| split_part(split_part(a.email,'@', 2) , '.',4) = e.displayname)
+    )
+
+    and NOT EXISTS (select 1 from explicitgroup_authenticateduser
+    where a.id = containedauthenticatedusers_id and e.id = explicitgroup_id);
+    RETURN NEW;
+END;
+$$;
+
+CREATE TRIGGER group_trigger AFTER UPDATE OF emailconfirmed ON public.authenticateduser FOR EACH ROW EXECUTE PROCEDURE public.groupmonitor();
+
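The trigger fires whenever emailconfirmed is updated to a non-null value and matches the user's full mail domain, plus two shortened subdomain variants of it, against explicitgroup.displayname. A hedged spot-check that memberships are actually being created, run through psql the same way the init scripts do:

    export PGPASSWORD=`cat /secrets/db/password`
    psql -U dataverse dataverse -h postgres -c "SELECT e.displayname, count(*) FROM explicitgroup_authenticateduser m JOIN explicitgroup e ON e.id = m.explicitgroup_id GROUP BY e.displayname ORDER BY count(*) DESC;"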
From 416a9fc08d682220b0bd280ebe1c425febb422e2 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 8 Dec 2022 11:42:19 +0000
Subject: [PATCH 155/354] fixed docker compose
---
 distros/dataverse.no/docker-compose.yaml | 2 +-
 distros/dataverse.no/init.d/0001-cleanup.sh | 7 -------
 2 files changed, 1 insertion(+), 8 deletions(-)
 delete mode 100644 distros/dataverse.no/init.d/0001-cleanup.sh

diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 98716bc..92ecfa2 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -275,7 +275,7 @@ services:
 - ${DOCROOT}/docroot:/opt/payara/docroot
 - ./configs/domain.xml:/opt/payara/domain.xml
 - ./init.d:/opt/payara/init.d
- - /distrib/private/secrets/init_2_conf_payara.sh:/opt/payara/scripts/init_2_conf_payara.sh
+ # - /distrib/private/secrets/init_2_conf_payara.sh:/opt/payara/scripts/init_2_conf_payara.sh
 - /mnt:/mnt
 labels:
 - "traefik.enable=true"

diff --git a/distros/dataverse.no/init.d/0001-cleanup.sh b/distros/dataverse.no/init.d/0001-cleanup.sh
deleted file mode 100644
index d5b2f09..0000000
--- a/distros/dataverse.no/init.d/0001-cleanup.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash/
-if [ -f /mnt/tmp/cleanup.sql ]
-then
- export PGPASSWORD=`cat /secrets/db/password`
- psql -U dataverse dataverse -h postgres -f /mnt/tmp/cleanup.sql
- mv /mnt/tmp/cleanup.sql /mnt/tmp/cleanup.sql-used
-fi

From 3b16371b7fc37d4267a80e7423a3bcd7f2191c6d Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Dec 2022 09:27:57 +0100
Subject: [PATCH 156/354] Update maintenance.rst
---
 doc/maintenance.rst | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/doc/maintenance.rst b/doc/maintenance.rst
index fc6feec..d135545 100644
--- a/doc/maintenance.rst
+++ b/doc/maintenance.rst
@@ -149,11 +149,14 @@ in the following exemple = S3 and = 2002-gre
 .. code-block:: bash
- UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.owner_id=107543 and o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
+ UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE and o.dtype = 'DataFile' AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
 UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
+exemple to update for a specics owner:
+.. code-block:: bash
+ UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.owner_id=107543 and o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
 Get MDF5 for the files uploaded today
 -------------------------------------
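Before running these UPDATEs in production, a dry run with the same WHERE clause shows how many DataFile rows would be rewritten; the sketch below keeps the s.id = o.owner_id join that the edited line above accidentally drops (its bare "WHERE and" will not even parse):

    psql -U dataverse dataverse -h postgres -c "SELECT count(*) FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS NULL AND o.storageidentifier LIKE '%file://%';"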
From 2dcfc9b198fe89dcb68df97478b279657e8d3d85 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Dec 2022 09:28:49 +0100
Subject: [PATCH 157/354] Update maintenance.rst
---
 doc/maintenance.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/maintenance.rst b/doc/maintenance.rst
index d135545..7eaf941 100644
--- a/doc/maintenance.rst
+++ b/doc/maintenance.rst
@@ -152,7 +152,7 @@ in the following exemple = S3 and = 2002-gre
 UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE and o.dtype = 'DataFile' AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
 UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
-exemple to update for a specics owner:
+exemple to update for a specifics owner:
 .. code-block:: bash
 UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.owner_id=107543 and o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');

From d4d246ea7e8d017b85a0575e81f437845311ba31 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Mon, 12 Dec 2022 11:40:34 +0000
Subject: [PATCH 158/354] fix jvm option for set domain
---
 distros/dataverse.no/init.d/04-setdomain.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/distros/dataverse.no/init.d/04-setdomain.sh b/distros/dataverse.no/init.d/04-setdomain.sh
index aa1e168..053252e 100755
--- a/distros/dataverse.no/init.d/04-setdomain.sh
+++ b/distros/dataverse.no/init.d/04-setdomain.sh
@@ -8,5 +8,5 @@ siteURLcmd=dataverse.siteUrl='https\:\/\/'${hostname}
 echo $siteURLcmd
 asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $siteURLcmd
 asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $hostnamecmd
-asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options -$siteURLcmd
+asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-D$siteURLcmd"

From 1218fbfd9a45b3a7cc3583d825b8466158266027 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 14 Dec 2022 09:20:15 +0000
Subject: [PATCH 159/354] added script for curation labels
---
 distros/dataverse.no/init.d/024-curation-lables.sh | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100755 distros/dataverse.no/init.d/024-curation-lables.sh

diff --git a/distros/dataverse.no/init.d/024-curation-lables.sh b/distros/dataverse.no/init.d/024-curation-lables.sh
new file mode 100755
index 0000000..ae4278e
--- /dev/null
+++ b/distros/dataverse.no/init.d/024-curation-lables.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+curl -X PUT -d '{"Standard Process":["Curator Assigned", "In Curation", "Awaiting Reply", "Legal/Ethical Concerns", "Awaiting Final Approval", "In Double-Blind Review", "Awaiting Article Publication", "Candidate for Deletion"], "Alternate Process":["State 1","State 2","State 3"]}' http://localhost:8080/api/admin/settings/:AllowedCurationLabels
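Since :AllowedCurationLabels is an ordinary database setting, the PUT above can be verified, and reverted, through the same admin endpoint; a small sketch:

    curl http://localhost:8080/api/admin/settings/:AllowedCurationLabels
    # remove the label sets again (assumes no datasets currently use them)
    curl -X DELETE http://localhost:8080/api/admin/settings/:AllowedCurationLabels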
From ed0a88835690b167421fdef113cff2b3d421310b Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 14 Dec 2022 10:24:36 +0000
Subject: [PATCH 160/354] updated header for demo
---
 distros/dataverse.no/modification/dataverse_header.xhtml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/distros/dataverse.no/modification/dataverse_header.xhtml b/distros/dataverse.no/modification/dataverse_header.xhtml
index 775c0bf..c4481e7 100644
--- a/distros/dataverse.no/modification/dataverse_header.xhtml
+++ b/distros/dataverse.no/modification/dataverse_header.xhtml
@@ -65,6 +65,7 @@
+ Test and Demo only