From 6ba2a1f4e3d282575325db160ed07282cbb734b0 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 13 May 2022 11:05:08 +0000
Subject: [PATCH 001/354] dataverse.no
---
.../dataverse.no/configs/backup-http-ssl.conf | 294 ++++
.../dataverse.no/configs/backup.http-ssl.conf | 287 +++
distros/dataverse.no/configs/domain.xml | 649 +++++++
.../configs/htdocs-ssl/index.html | 1 +
.../dataverse.no/configs/htdocs/index.html | 1 +
.../dataverse.no/configs/http-ssl-test.conf | 322 ++++
distros/dataverse.no/configs/http-ssl.conf | 349 ++++
distros/dataverse.no/configs/httpd.conf | 551 ++++++
distros/dataverse.no/configs/schema.xml | 1546 +++++++++++++++++
distros/dataverse.no/docker-compose.yaml | 282 +++
.../dataverse.no/init.d/006-s3-aws-storage.sh | 19 +
.../dataverse.no/init.d/007-s3-aws-storage.sh | 19 +
.../dataverse.no/init.d/01-persistent-id.sh | 22 +
.../dataverse.no/init.d/010-mailrelay-set.sh | 9 +
.../dataverse.no/init.d/011-local-storage.sh | 6 +
.../dataverse.no/init.d/012-minio-bucket1.sh | 17 +
.../dataverse.no/init.d/021-jhove-set-link.sh | 3 +
distros/dataverse.no/init.d/022-splitpath.sh | 3 +
distros/dataverse.no/init.d/03-doi-set.sh | 15 +
distros/dataverse.no/init.d/04-setdomain.sh | 10 +
distros/dataverse.no/init.d/07-previewers.sh | 38 +
.../dataverse.no/init.d/08-federated-login.sh | 7 +
distros/dataverse.no/init.d/105-reindex.sh | 3 +
distros/dataverse.no/init.d/cors.json | 10 +
24 files changed, 4463 insertions(+)
create mode 100755 distros/dataverse.no/configs/backup-http-ssl.conf
create mode 100755 distros/dataverse.no/configs/backup.http-ssl.conf
create mode 100644 distros/dataverse.no/configs/domain.xml
create mode 100755 distros/dataverse.no/configs/htdocs-ssl/index.html
create mode 100755 distros/dataverse.no/configs/htdocs/index.html
create mode 100755 distros/dataverse.no/configs/http-ssl-test.conf
create mode 100755 distros/dataverse.no/configs/http-ssl.conf
create mode 100755 distros/dataverse.no/configs/httpd.conf
create mode 100644 distros/dataverse.no/configs/schema.xml
create mode 100755 distros/dataverse.no/docker-compose.yaml
create mode 100755 distros/dataverse.no/init.d/006-s3-aws-storage.sh
create mode 100755 distros/dataverse.no/init.d/007-s3-aws-storage.sh
create mode 100755 distros/dataverse.no/init.d/01-persistent-id.sh
create mode 100755 distros/dataverse.no/init.d/010-mailrelay-set.sh
create mode 100755 distros/dataverse.no/init.d/011-local-storage.sh
create mode 100755 distros/dataverse.no/init.d/012-minio-bucket1.sh
create mode 100755 distros/dataverse.no/init.d/021-jhove-set-link.sh
create mode 100755 distros/dataverse.no/init.d/022-splitpath.sh
create mode 100755 distros/dataverse.no/init.d/03-doi-set.sh
create mode 100755 distros/dataverse.no/init.d/04-setdomain.sh
create mode 100755 distros/dataverse.no/init.d/07-previewers.sh
create mode 100755 distros/dataverse.no/init.d/08-federated-login.sh
create mode 100755 distros/dataverse.no/init.d/105-reindex.sh
create mode 100644 distros/dataverse.no/init.d/cors.json
diff --git a/distros/dataverse.no/configs/backup-http-ssl.conf b/distros/dataverse.no/configs/backup-http-ssl.conf
new file mode 100755
index 0000000..19247ee
--- /dev/null
+++ b/distros/dataverse.no/configs/backup-http-ssl.conf
@@ -0,0 +1,294 @@
+#
+# When we also provide SSL we have to listen to
+# the HTTPS port in addition.
+#
+Listen 443 https
+
+##
+## SSL Global Context
+##
+## All SSL configuration in this context applies both to
+## the main server and all SSL-enabled virtual hosts.
+##
+
+# Pass Phrase Dialog:
+# Configure the pass phrase gathering process.
+# The filtering dialog program (`builtin' is an internal
+# terminal dialog) has to provide the pass phrase on stdout.
+SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog
+
+# Inter-Process Session Cache:
+# Configure the SSL Session Cache: First the mechanism
+# to use and second the expiring timeout (in seconds).
+SSLSessionCache shmcb:/run/httpd/sslcache(512000)
+SSLSessionCacheTimeout 300
+
+# Pseudo Random Number Generator (PRNG):
+# Configure one or more sources to seed the PRNG of the
+# SSL library. The seed data should be of good random quality.
+# WARNING! On some platforms /dev/random blocks if not enough entropy
+# is available. This means you then cannot use the /dev/random device
+# because it would lead to very long connection times (as long as
+# it requires to make more entropy available). But usually those
+# platforms additionally provide a /dev/urandom device which doesn't
+# block. So, if available, use this one instead. Read the mod_ssl User
+# Manual for more details.
+SSLRandomSeed startup file:/dev/urandom 256
+SSLRandomSeed connect builtin
+#SSLRandomSeed startup file:/dev/random 512
+#SSLRandomSeed connect file:/dev/random 512
+#SSLRandomSeed connect file:/dev/urandom 512
+
+#
+# Use "SSLCryptoDevice" to enable any supported hardware
+# accelerators. Use "openssl engine -v" to list supported
+# engine names. NOTE: If you enable an accelerator and the
+# server does not start, consult the error logs and ensure
+# your accelerator is functioning properly.
+#
+SSLCryptoDevice builtin
+#SSLCryptoDevice ubsec
+
+##
+## SSL Virtual Host Context
+##
+#
+
+ ServerName test-docker.dataverse.no
+
+ SSLProxyEngine on
+ ProxyPass / https://test-docker.dataverse.no:443/
+ ProxyPassReverse / https://test-docker.dataverse.no:443/
+
+
+
+
+# General setup for the virtual host, inherited from global configuration
+#DocumentRoot "/var/www/html"
+#ServerName www.example.com:443
+ServerName test-docker.dataverse.no
+Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains"
+# Content-Security-Policy: some Java files are loaded over http, so this
+# header cannot be used.
+#Header always set Content-Security-Policy "default-src https:"
+Header always set X-Frame-Options "SAMEORIGIN"
+Header always set X-XSS-Protection "1; mode=block"
+Header always set X-Content-Type-Options "nosniff"
+#:443
+
+# Use separate log files for the SSL virtual host; note that LogLevel
+# is not inherited from httpd.conf.
+ErrorLog /dev/stdout
+TransferLog /dev/stdout
+LoadModule dumpio_module modules/mod_dumpio.so
+
+ DumpIOInput On
+ DumpIOOutput On
+
+LogLevel dumpio:trace7
+
+Header always set X-Frame-Options "SAMEORIGIN"
+Header always set X-XSS-Protection "1; mode=block"
+Header always set X-Content-Type-Options "nosniff"
+Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None"
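+# A quick way to spot-check the headers set above from outside once this vhost is live -- just a
+# sketch, assuming test-docker.dataverse.no (the ServerName above) resolves to this host:
+#   curl -skI https://test-docker.dataverse.no/ | grep -iE 'strict-transport|x-frame|x-xss|x-content-type|set-cookie'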
+
+# SSL Engine Switch:
+# Enable/Disable SSL for this virtual host.
+SSLEngine on
+
+# SSL Protocol support:
+# List the enabled protocol levels with which clients will be able to
+# connect. Disable SSLv2 access by default:
+SSLProtocol all -SSLv2 -SSLv3
+
+# SSL Cipher Suite:
+# List the ciphers that the client is permitted to negotiate.
+# See the mod_ssl documentation for a complete list.
+SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA
+
+# Speed-optimized SSL Cipher configuration:
+# If speed is your main concern (on busy HTTPS servers e.g.),
+# you might want to force clients to specific, performance
+# optimized ciphers. In this case, prepend those ciphers
+# to the SSLCipherSuite list, and enable SSLHonorCipherOrder.
+# Caveat: by giving precedence to RC4-SHA and AES128-SHA
+# (as in the example below), most connections will no longer
+# have perfect forward secrecy - if the server's key is
+# compromised, captures of past or future traffic must be
+# considered compromised, too.
+#SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5
+#SSLHonorCipherOrder on
+
+# Server Certificate:
+# Point SSLCertificateFile at a PEM encoded certificate. If
+# the certificate is encrypted, then you will be prompted for a
+# pass phrase. Note that a kill -HUP will prompt again. A new
+# certificate can be generated using the genkey(1) command.
+SSLCertificateFile /etc/pki/tls/certs/localhost.crt
+
+# Server Private Key:
+# If the key is not combined with the certificate, use this
+# directive to point at the key file. Keep in mind that if
+# you've both a RSA and a DSA private key you can configure
+# both in parallel (to also allow the use of DSA ciphers, etc.)
+SSLCertificateKeyFile /etc/pki/tls/private/localhost.key
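+# Sketch of a sanity check that the certificate and key above belong together (assumes an RSA
+# key at the default paths used in this file); the two digests should match:
+#   openssl x509 -noout -modulus -in /etc/pki/tls/certs/localhost.crt | openssl md5
+#   openssl rsa  -noout -modulus -in /etc/pki/tls/private/localhost.key | openssl md5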
+
+# Server Certificate Chain:
+# Point SSLCertificateChainFile at a file containing the
+# concatenation of PEM encoded CA certificates which form the
+# certificate chain for the server certificate. Alternatively
+# the referenced file can be the same as SSLCertificateFile
+# when the CA certificates are directly appended to the server
+# certificate for convenience.
+#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt
+
+# Certificate Authority (CA):
+# Set the CA certificate verification path where to find CA
+# certificates for client authentication or alternatively one
+# huge file containing all of them (file must be PEM encoded)
+#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt
+
+# Client Authentication (Type):
+# Client certificate verification type and depth. Types are
+# none, optional, require and optional_no_ca. Depth is a
+# number which specifies how deeply to verify the certificate
+# issuer chain before deciding the certificate is not valid.
+#SSLVerifyClient require
+#SSLVerifyDepth 10
+
+LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so
+
+#
+# Turn this on to support "require valid-user" rules from other
+# mod_authn_* modules, and use "require shib-session" for anonymous
+# session-based authorization in mod_shib.
+#
+ShibCompatValidUser Off
+
+#
+# Ensures handler will be accessible.
+#
+
+ AuthType None
+ Require all granted
+ # vty
+ ShibRequestSetting requireSession 1
+ require shib-session
+
+
+#
+# Used for example style sheet in error templates.
+#
+
+
+ AuthType None
+ Require all granted
+
+ Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css
+
+
+#
+# Configure the module for content.
+#
+# You MUST enable AuthType shibboleth for the module to process
+# any requests, and there MUST be a require command as well. To
+# enable Shibboleth but not specify any session/access requirements
+# use "require shibboleth".
+#
+
+ AuthType shibboleth
+ ShibRequestSetting requireSession 1
+ require shib-session
+
+#ProxyPass / ajp://dataverse:8009
+#ProxyPassReverse / ajp://dataverse:8009
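+# Once mod_shib is loaded as above, the SP exposes handler URLs under /Shibboleth.sso; a hedged
+# smoke test (the Status handler is normally ACL-restricted to localhost in shibboleth2.xml):
+#   curl -sk https://test-docker.dataverse.no/Shibboleth.sso/Status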
+
+# Access Control:
+# With SSLRequire you can do per-directory access control based
+# on arbitrary complex boolean expressions containing server
+# variable checks and other lookup directives. The syntax is a
+# mixture between C and Perl. See the mod_ssl documentation
+# for more details.
+#
+#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \
+# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \
+# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \
+# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \
+# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \
+# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/
+#
+
+# SSL Engine Options:
+# Set various options for the SSL engine.
+# o FakeBasicAuth:
+# Translate the client X.509 into a Basic Authorisation. This means that
+# the standard Auth/DBMAuth methods can be used for access control. The
+# user name is the `one line' version of the client's X.509 certificate.
+# Note that no password is obtained from the user. Every entry in the user
+# file needs this password: `xxj31ZMTZzkVA'.
+# o ExportCertData:
+# This exports two additional environment variables: SSL_CLIENT_CERT and
+# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the
+# server (always existing) and the client (only existing when client
+# authentication is used). This can be used to import the certificates
+# into CGI scripts.
+# o StdEnvVars:
+# This exports the standard SSL/TLS related `SSL_*' environment variables.
+# Per default this exportation is switched off for performance reasons,
+# because the extraction step is an expensive operation and is usually
+# useless for serving static content. So one usually enables the
+# exportation for CGI and SSI requests only.
+# o StrictRequire:
+# This denies access when "SSLRequireSSL" or "SSLRequire" is applied even
+# under a "Satisfy any" situation, i.e. when it applies access is denied
+# and no other module can change it.
+# o OptRenegotiate:
+# This enables optimized SSL connection renegotiation handling when SSL
+# directives are used in per-directory context.
+#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire
+
+ SSLOptions +StdEnvVars
+
+
+ SSLOptions +StdEnvVars
+
+
+# SSL Protocol Adjustments:
+# The safe and default but still SSL/TLS standard compliant shutdown
+# approach is that mod_ssl sends the close notify alert but doesn't wait for
+# the close notify alert from client. When you need a different shutdown
+# approach you can use one of the following variables:
+# o ssl-unclean-shutdown:
+# This forces an unclean shutdown when the connection is closed, i.e. no
+# SSL close notify alert is sent or allowed to be received. This violates
+# the SSL/TLS standard but is needed for some brain-dead browsers. Use
+# this when you receive I/O errors because of the standard approach where
+# mod_ssl sends the close notify alert.
+# o ssl-accurate-shutdown:
+# This forces an accurate shutdown when the connection is closed, i.e. a
+# SSL close notify alert is sent and mod_ssl waits for the close notify
+# alert of the client. This is 100% SSL/TLS standard compliant, but in
+# practice often causes hanging connections with brain-dead browsers. Use
+# this only for browsers where you know that their SSL implementation
+# works correctly.
+# Notice: Most problems of broken clients are also related to the HTTP
+# keep-alive facility, so you usually additionally want to disable
+# keep-alive for those clients, too. Use variable "nokeepalive" for this.
+# Similarly, one has to force some clients to use HTTP/1.0 to work around
+# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and
+# "force-response-1.0" for this.
+BrowserMatch "MSIE [2-5]" \
+ nokeepalive ssl-unclean-shutdown \
+ downgrade-1.0 force-response-1.0
+
+# Per-Server Logging:
+# The home of a custom SSL log file. Use this when you want a
+# compact non-error SSL logfile on a virtual host basis.
+#CustomLog /dev/stdout \
+# "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
+CustomLog /var/log/httpd/access.log combined
+ErrorLog /var/log/httpd/error.log
+
+ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i"
+
+
diff --git a/distros/dataverse.no/configs/backup.http-ssl.conf b/distros/dataverse.no/configs/backup.http-ssl.conf
new file mode 100755
index 0000000..414970c
--- /dev/null
+++ b/distros/dataverse.no/configs/backup.http-ssl.conf
@@ -0,0 +1,287 @@
+#
+# When we also provide SSL we have to listen to
+# the HTTPS port in addition.
+#
+Listen 443 https
+
+##
+## SSL Global Context
+##
+## All SSL configuration in this context applies both to
+## the main server and all SSL-enabled virtual hosts.
+##
+
+# Pass Phrase Dialog:
+# Configure the pass phrase gathering process.
+# The filtering dialog program (`builtin' is an internal
+# terminal dialog) has to provide the pass phrase on stdout.
+SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog
+
+# Inter-Process Session Cache:
+# Configure the SSL Session Cache: First the mechanism
+# to use and second the expiring timeout (in seconds).
+SSLSessionCache shmcb:/run/httpd/sslcache(512000)
+SSLSessionCacheTimeout 300
+
+# Pseudo Random Number Generator (PRNG):
+# Configure one or more sources to seed the PRNG of the
+# SSL library. The seed data should be of good random quality.
+# WARNING! On some platforms /dev/random blocks if not enough entropy
+# is available. This means you then cannot use the /dev/random device
+# because it would lead to very long connection times (as long as
+# it requires to make more entropy available). But usually those
+# platforms additionally provide a /dev/urandom device which doesn't
+# block. So, if available, use this one instead. Read the mod_ssl User
+# Manual for more details.
+SSLRandomSeed startup file:/dev/urandom 256
+SSLRandomSeed connect builtin
+#SSLRandomSeed startup file:/dev/random 512
+#SSLRandomSeed connect file:/dev/random 512
+#SSLRandomSeed connect file:/dev/urandom 512
+
+#
+# Use "SSLCryptoDevice" to enable any supported hardware
+# accelerators. Use "openssl engine -v" to list supported
+# engine names. NOTE: If you enable an accelerator and the
+# server does not start, consult the error logs and ensure
+# your accelerator is functioning properly.
+#
+SSLCryptoDevice builtin
+#SSLCryptoDevice ubsec
+
+##
+## SSL Virtual Host Context
+##
+#
+
+ ServerName test-docker.dataverse.no
+
+ SSLProxyEngine on
+ ProxyPass / https://test-docker.dataverse.no:443/
+ ProxyPassReverse / https://test-docker.dataverse.no:443/
+
+
+
+
+# General setup for the virtual host, inherited from global configuration
+#DocumentRoot "/var/www/html"
+#ServerName www.example.com:443
+ServerName test-docker.dataverse.no
+Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains"
+# Content-Security-Policy: some Java files are loaded over http, so this
+# header cannot be used.
+#Header always set Content-Security-Policy "default-src https:"
+Header always set X-Frame-Options "SAMEORIGIN"
+Header always set X-XSS-Protection "1; mode=block"
+Header always set X-Content-Type-Options "nosniff"
+#:443
+
+# Use separate log files for the SSL virtual host; note that LogLevel
+# is not inherited from httpd.conf.
+ErrorLog /dev/stdout
+TransferLog /dev/stdout
+LogLevel warn
+
+Header always set X-Frame-Options "SAMEORIGIN"
+Header always set X-XSS-Protection "1; mode=block"
+Header always set X-Content-Type-Options "nosniff"
+Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None"
+
+# SSL Engine Switch:
+# Enable/Disable SSL for this virtual host.
+SSLEngine on
+
+# SSL Protocol support:
+# List the enabled protocol levels with which clients will be able to
+# connect. Disable SSLv2 access by default:
+SSLProtocol all -SSLv2 -SSLv3
+
+# SSL Cipher Suite:
+# List the ciphers that the client is permitted to negotiate.
+# See the mod_ssl documentation for a complete list.
+SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA
+
+# Speed-optimized SSL Cipher configuration:
+# If speed is your main concern (on busy HTTPS servers e.g.),
+# you might want to force clients to specific, performance
+# optimized ciphers. In this case, prepend those ciphers
+# to the SSLCipherSuite list, and enable SSLHonorCipherOrder.
+# Caveat: by giving precedence to RC4-SHA and AES128-SHA
+# (as in the example below), most connections will no longer
+# have perfect forward secrecy - if the server's key is
+# compromised, captures of past or future traffic must be
+# considered compromised, too.
+#SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5
+#SSLHonorCipherOrder on
+
+# Server Certificate:
+# Point SSLCertificateFile at a PEM encoded certificate. If
+# the certificate is encrypted, then you will be prompted for a
+# pass phrase. Note that a kill -HUP will prompt again. A new
+# certificate can be generated using the genkey(1) command.
+SSLCertificateFile /etc/pki/tls/certs/localhost.crt
+
+# Server Private Key:
+# If the key is not combined with the certificate, use this
+# directive to point at the key file. Keep in mind that if
+# you've both a RSA and a DSA private key you can configure
+# both in parallel (to also allow the use of DSA ciphers, etc.)
+SSLCertificateKeyFile /etc/pki/tls/private/localhost.key
+
+# Server Certificate Chain:
+# Point SSLCertificateChainFile at a file containing the
+# concatenation of PEM encoded CA certificates which form the
+# certificate chain for the server certificate. Alternatively
+# the referenced file can be the same as SSLCertificateFile
+# when the CA certificates are directly appended to the server
+# certificate for convenience.
+#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt
+
+# Certificate Authority (CA):
+# Set the CA certificate verification path where to find CA
+# certificates for client authentication or alternatively one
+# huge file containing all of them (file must be PEM encoded)
+#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt
+
+# Client Authentication (Type):
+# Client certificate verification type and depth. Types are
+# none, optional, require and optional_no_ca. Depth is a
+# number which specifies how deeply to verify the certificate
+# issuer chain before deciding the certificate is not valid.
+#SSLVerifyClient require
+#SSLVerifyDepth 10
+
+LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so
+
+#
+# Turn this on to support "require valid-user" rules from other
+# mod_authn_* modules, and use "require shib-session" for anonymous
+# session-based authorization in mod_shib.
+#
+ShibCompatValidUser Off
+
+#
+# Ensures handler will be accessible.
+#
+
+ AuthType None
+ Require all granted
+ # vty
+ ShibRequestSetting requireSession 1
+ require shib-session
+
+
+#
+# Used for example style sheet in error templates.
+#
+
+
+ AuthType None
+ Require all granted
+
+ Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css
+
+
+#
+# Configure the module for content.
+#
+# You MUST enable AuthType shibboleth for the module to process
+# any requests, and there MUST be a require command as well. To
+# enable Shibboleth but not specify any session/access requirements
+# use "require shibboleth".
+#
+
+ AuthType shibboleth
+ ShibRequestSetting requireSession 1
+ require shib-session
+
+#ProxyPass / ajp://dataverse:8009
+#ProxyPassReverse / ajp://dataverse:8009
+
+# Access Control:
+# With SSLRequire you can do per-directory access control based
+# on arbitrary complex boolean expressions containing server
+# variable checks and other lookup directives. The syntax is a
+# mixture between C and Perl. See the mod_ssl documentation
+# for more details.
+#
+#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \
+# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \
+# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \
+# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \
+# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \
+# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/
+#
+
+# SSL Engine Options:
+# Set various options for the SSL engine.
+# o FakeBasicAuth:
+# Translate the client X.509 into a Basic Authorisation. This means that
+# the standard Auth/DBMAuth methods can be used for access control. The
+# user name is the `one line' version of the client's X.509 certificate.
+# Note that no password is obtained from the user. Every entry in the user
+# file needs this password: `xxj31ZMTZzkVA'.
+# o ExportCertData:
+# This exports two additional environment variables: SSL_CLIENT_CERT and
+# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the
+# server (always existing) and the client (only existing when client
+# authentication is used). This can be used to import the certificates
+# into CGI scripts.
+# o StdEnvVars:
+# This exports the standard SSL/TLS related `SSL_*' environment variables.
+# Per default this exportation is switched off for performance reasons,
+# because the extraction step is an expensive operation and is usually
+# useless for serving static content. So one usually enables the
+# exportation for CGI and SSI requests only.
+# o StrictRequire:
+# This denies access when "SSLRequireSSL" or "SSLRequire" is applied even
+# under a "Satisfy any" situation, i.e. when it applies access is denied
+# and no other module can change it.
+# o OptRenegotiate:
+# This enables optimized SSL connection renegotiation handling when SSL
+# directives are used in per-directory context.
+#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire
+
+ SSLOptions +StdEnvVars
+
+
+ SSLOptions +StdEnvVars
+
+
+# SSL Protocol Adjustments:
+# The safe and default but still SSL/TLS standard compliant shutdown
+# approach is that mod_ssl sends the close notify alert but doesn't wait for
+# the close notify alert from client. When you need a different shutdown
+# approach you can use one of the following variables:
+# o ssl-unclean-shutdown:
+# This forces an unclean shutdown when the connection is closed, i.e. no
+# SSL close notify alert is sent or allowed to be received. This violates
+# the SSL/TLS standard but is needed for some brain-dead browsers. Use
+# this when you receive I/O errors because of the standard approach where
+# mod_ssl sends the close notify alert.
+# o ssl-accurate-shutdown:
+# This forces an accurate shutdown when the connection is closed, i.e. a
+# SSL close notify alert is sent and mod_ssl waits for the close notify
+# alert of the client. This is 100% SSL/TLS standard compliant, but in
+# practice often causes hanging connections with brain-dead browsers. Use
+# this only for browsers where you know that their SSL implementation
+# works correctly.
+# Notice: Most problems of broken clients are also related to the HTTP
+# keep-alive facility, so you usually additionally want to disable
+# keep-alive for those clients, too. Use variable "nokeepalive" for this.
+# Similarly, one has to force some clients to use HTTP/1.0 to work around
+# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and
+# "force-response-1.0" for this.
+BrowserMatch "MSIE [2-5]" \
+ nokeepalive ssl-unclean-shutdown \
+ downgrade-1.0 force-response-1.0
+
+# Per-Server Logging:
+# The home of a custom SSL log file. Use this when you want a
+# compact non-error SSL logfile on a virtual host basis.
+CustomLog /dev/stdout \
+ "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
+
+ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i"
+
+
diff --git a/distros/dataverse.no/configs/domain.xml b/distros/dataverse.no/configs/domain.xml
new file mode 100644
index 0000000..818f4e3
--- /dev/null
+++ b/distros/dataverse.no/configs/domain.xml
@@ -0,0 +1,649 @@
+
+ log-notifier
+
+
+
+
+
+
+
+
+
+ log-notifier
+
+
+
+ log-notifier
+
+
+ log-notifier
+
+ -server
+ [9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED
+ [9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED
+ [9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED
+ [9|]--add-opens=java.base/java.lang=ALL-UNNAMED
+ [9|]--add-opens=java.base/java.net=ALL-UNNAMED
+ [9|]--add-opens=java.base/java.nio=ALL-UNNAMED
+ [9|]--add-opens=java.base/java.util=ALL-UNNAMED
+ [9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED
+ [9|]--add-opens=java.management/sun.management=ALL-UNNAMED
+ [9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED
+ [9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED
+ [9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED
+ [9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED
+ [9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED
+ -XX:NewRatio=2
+ -XX:+UnlockDiagnosticVMOptions
+ -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory
+ -Djava.awt.headless=true
+ -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf
+ -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy
+ -Djavax.management.builder.initial=com.sun.enterprise.v3.admin.AppServerMBeanServerBuilder
+ -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as
+ -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks
+ -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks
+ -Djavax.xml.accessExternalSchema=all
+ -Djdbc.drivers=org.h2.Driver
+ -Djdk.corba.allowOutputStreamSubclass=true
+ -Djdk.tls.rejectClientInitiatedRenegotiation=true
+ -DANTLR_USE_DIRECT_CLASS_LOADING=true
+ -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.shell.remote,org.apache.felix.fileinstall
+ -Dosgi.shell.telnet.port=6666
+ -Dosgi.shell.telnet.maxconn=1
+ -Dosgi.shell.telnet.ip=127.0.0.1
+ -Dgosh.args=--nointeractive
+ -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/
+ -Dfelix.fileinstall.poll=5000
+ -Dfelix.fileinstall.log.level=2
+ -Dfelix.fileinstall.bundles.new.start=true
+ -Dfelix.fileinstall.bundles.startTransient=true
+ -Dfelix.fileinstall.disableConfigSave=false
+ -Dcom.ctc.wstx.returnNullForDefaultNamespace=true
+ -Dorg.glassfish.grizzly.DEFAULT_MEMORY_MANAGER=org.glassfish.grizzly.memory.HeapMemoryManager
+ -Dorg.glassfish.grizzly.nio.DefaultSelectorHandler.force-selector-spin-detection=true
+ -Dorg.jboss.weld.serialization.beanIdentifierIndexOptimization=false
+ [|8]-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed
+ [|8]-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext
+ [1.8.0|1.8.0u120]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.6.jar
+ [1.8.0u121|1.8.0u160]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.7.jar
+ [1.8.0u161|1.8.0u190]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.jar
+ [1.8.0u191|1.8.0u250]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.1.jar
+ [1.8.0u251|]-Xbootclasspath/a:${com.sun.aas.installRoot}/lib/grizzly-npn-api.jar
+ [Azul-1.8.0u222|1.8.0u260]-XX:+UseOpenJSSE
+ -XX:+UseContainerSupport
+ -XX:MaxRAMPercentage=${ENV=MEM_MAX_RAM_PERCENTAGE}
+ -Xss${ENV=MEM_XSS}
+ -Ddataverse.files.S3.type=s3
+ -Ddataverse.files.S3.label=S3
+ -Ddataverse.files.S3.bucket-name=2002-green-dataversenotest1
+ -Ddataverse.files.S3.download-redirect=true
+ -Ddataverse.files.S3.url-expiration-minutes=120
+ -Ddataverse.files.S3.connection-pool-size=4096
+ -Ddataverse.files.storage-driver-id=S3
+ -Ddataverse.files.S3.profile=cloudian
+ -Ddataverse.files.S3.custom-endpoint-url=https://s3-oslo.educloud.no
+ -Ddataverse.files.file.type=file
+ -Ddataverse.files.file.label=file
+ -Ddataverse.files.file.directory=/data
+ -Ddoi.username=BIBSYS.UIT-ORD
+ -Ddoi.password=${ALIAS=doi_password_alias}
+ -Ddoi.dataciterestapiurlstring=https://api.test.datacite.org
+ -Ddoi.baseurlstring=https://mds.test.datacite.org
+
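+            <!-- Sketch: the same options can also be added at runtime with asadmin instead of
+                 editing this file by hand. The container name "dataverse" (taken from the
+                 ajp://dataverse reference in the httpd config) and the Payara path /opt/payara
+                 are assumptions about this deployment; colons in values must be escaped for asadmin:
+                   docker exec dataverse /opt/payara/bin/asadmin create-jvm-options \
+                     '-Ddataverse.files.S3.custom-endpoint-url=https\://s3-oslo.educloud.no'
+            -->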
+
+ log-notifier
+
+
+
+
+
+
+ log-notifier
+
+
+
+ log-notifier
+
+
+ log-notifier
+
+
+
+ -server
+ [9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED
+ [9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED
+ [9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED
+ [9|]--add-opens=java.base/java.lang=ALL-UNNAMED
+ [9|]--add-opens=java.base/java.net=ALL-UNNAMED
+ [9|]--add-opens=java.base/java.nio=ALL-UNNAMED
+ [9|]--add-opens=java.base/java.util=ALL-UNNAMED
+ [9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED
+ [9|]--add-opens=java.management/sun.management=ALL-UNNAMED
+ [9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED
+ [9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED
+ [9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED
+ [9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED
+ [9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED
+ -Xmx512m
+ -XX:NewRatio=2
+ -XX:+UnlockDiagnosticVMOptions
+ -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory
+ -Djava.awt.headless=true
+ -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf
+ -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy
+ -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as
+ -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks
+ -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks
+ -Djdbc.drivers=org.h2.Driver
+ -Djdk.corba.allowOutputStreamSubclass=true
+ -Djdk.tls.rejectClientInitiatedRenegotiation=true
+ -DANTLR_USE_DIRECT_CLASS_LOADING=true
+ -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall
+ -Dosgi.shell.telnet.port=${OSGI_SHELL_TELNET_PORT}
+ -Dosgi.shell.telnet.maxconn=1
+ -Dosgi.shell.telnet.ip=127.0.0.1
+ -Dgosh.args=--nointeractive
+ -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/
+ -Dfelix.fileinstall.poll=5000
+ -Dfelix.fileinstall.log.level=3
+ -Dfelix.fileinstall.bundles.new.start=true
+ -Dfelix.fileinstall.bundles.startTransient=true
+ -Dfelix.fileinstall.disableConfigSave=false
+ -Dorg.glassfish.grizzly.DEFAULT_MEMORY_MANAGER=org.glassfish.grizzly.memory.HeapMemoryManager
+ -Dorg.glassfish.grizzly.nio.DefaultSelectorHandler.force-selector-spin-detection=true
+ -Dorg.jboss.weld.serialization.beanIdentifierIndexOptimization=false
+ [|8]-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed
+ [|8]-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext
+ [1.8.0|1.8.0u120]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.6.jar
+ [1.8.0u121|1.8.0u160]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.7.jar
+ [1.8.0u161|1.8.0u190]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.jar
+ [1.8.0u191|1.8.0u250]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.1.jar
+ [1.8.0u251|]-Xbootclasspath/a:${com.sun.aas.installRoot}/lib/grizzly-npn-api.jar
+ [Azul-1.8.0u222|1.8.0u260]-XX:+UseOpenJSSE
+
\ No newline at end of file
diff --git a/distros/dataverse.no/configs/htdocs-ssl/index.html b/distros/dataverse.no/configs/htdocs-ssl/index.html
new file mode 100755
index 0000000..6b233f9
--- /dev/null
+++ b/distros/dataverse.no/configs/htdocs-ssl/index.html
@@ -0,0 +1 @@
+It works with SSL!
diff --git a/distros/dataverse.no/configs/htdocs/index.html b/distros/dataverse.no/configs/htdocs/index.html
new file mode 100755
index 0000000..f5f1c37
--- /dev/null
+++ b/distros/dataverse.no/configs/htdocs/index.html
@@ -0,0 +1 @@
+
Ingested files:{3}
+notification.ingest.completedwitherrors=Dataset {2} ingest process has finished with errors. Ingested files:{3}
+notification.mail.import.filesystem=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded and verified.
+notification.import.filesystem=Dataset {1} has been successfully uploaded and verified.
+notification.import.checksum={1}, dataset had file checksums added via a batch job.
+removeNotification=Remove Notification
+groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned.
+user.message.signup.label=Create Account
+user.message.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files.
+user.signup.otherLogInOptions.tip=You can also create a Dataverse account with one of our other log in options.
+user.username.illegal.tip=Between 2-60 characters, and can use "a-z", "0-9", "_" for your username.
+user.username=Username
+user.username.taken=This username is already taken.
+user.username.invalid=This username contains an invalid character or is outside the length requirement (2-60 characters).
+user.username.valid=Create a valid username of 2 to 60 characters in length containing letters (a-Z), numbers (0-9), dashes (-), underscores (_), and periods (.).
+user.noPasswd=No Password
+user.currentPasswd=Current Password
+user.currentPasswd.tip=Please enter the current password for this account.
+user.passwd.illegal.tip=Password needs to be at least 6 characters, include one letter and one number, and special characters may be used.
+user.rePasswd=Retype Password
+user.rePasswd.tip=Please retype the password you entered above.
+user.firstName=Given Name
+user.firstName.tip=The first name or name you would like to use for this account.
+user.lastName=Family Name
+user.lastName.tip=The last name you would like to use for this account.
+user.email.tip=A valid email address you have access to in order to be contacted.
+user.email.taken=This email address is already taken.
+user.affiliation.tip=The organization with which you are affiliated.
+user.position=Position
+user.position.tip=Your role or title at the organization you are affiliated with; such as staff, faculty, student, etc.
+user.acccountterms=General Terms of Use
+user.acccountterms.tip=The terms and conditions for using the application and services.
+user.acccountterms.required=Please check the box to indicate your acceptance of the General Terms of Use.
+user.acccountterms.iagree=I have read and accept the Dataverse General Terms of Use as outlined above.
+user.createBtn=Create Account
+user.updatePassword.welcome=Welcome to Dataverse {0}
+user.updatePassword.warning=With the release of our new Dataverse 4.0 upgrade, the password requirements and General Terms of Use have been updated. As this is the first time you are using Dataverse since the update, you need to create a new password and agree to the new General Terms of Use.
+user.updatePassword.password={0}
+user.password=Password
+user.newPassword=New Password
+authenticationProvidersAvailable.tip={0}There are no active authentication providers{1}If you are a system administrator, please enable one using the API.{2}If you are not a system administrator, please contact the one for your institution.
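+# The tip above refers to the native admin API; a sketch of the calls it implies (localhost:8080
+# and the provider id "shib" are assumptions about this deployment, and /api/admin is normally
+# reachable from localhost only):
+#   curl http://localhost:8080/api/admin/authenticationProviders
+#   curl -X PUT -d true http://localhost:8080/api/admin/authenticationProviders/shib/enabled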
+
+passwdVal.passwdReq.title=Your password must contain:
+passwdVal.passwdReq.goodStrength=passwords of at least {0} characters are exempt from all other requirements
+passwdVal.passwdReq.lengthReq=At least {0} characters
+passwdVal.passwdReq.characteristicsReq=At least 1 character from {0} of the following types:
+passwdVal.passwdReq.notInclude=It may not include:
+passwdVal.passwdReq.consecutiveDigits=More than {0} numbers in a row
+passwdVal.passwdReq.dictionaryWords=Dictionary words
+passwdVal.passwdReq.unknownPasswordRule=Unknown, contact your administrator
+#printf syntax used to pass to passay library
+passwdVal.expireRule.errorCode=EXPIRED
+passwdVal.expireRule.errorMsg=The password is over %1$s days old and has expired.
+passwdVal.goodStrengthRule.errorMsg=Note: passwords are always valid with a %1$s or more character length regardless.
+passwdVal.goodStrengthRule.errorCode=NO_GOODSTRENGTH
+passwdVal.passwdReset.resetLinkTitle=Password Reset Link
+passwdVal.passwdReset.resetLinkDesc=Your password reset link is not valid
+passwdVal.passwdReset.resetInitiated=Password Reset Initiated
+passwdVal.passwdReset.valBlankLog=new password is blank
+passwdVal.passwdReset.valFacesError=Password Error
+passwdVal.passwdReset.valFacesErrorDesc=Please enter a new password for your account.
+passwdVal.passwdValBean.warnDictionaryRead=Dictionary was set, but none was read in.
+passwdVal.passwdValBean.warnDictionaryObj=PwDictionaries not set and no default password file found:
+passwdVal.passwdValBean.warnSetStrength=The PwGoodStrength {0} value competes with the PwMinLength value of {1} and is added to {2}
+
+# passwordreset.xhtml
+pageTitle.passwdReset.pre=Account Password Reset
+passwdReset.token=token :
+passwdReset.userLookedUp=user looked up :
+passwdReset.emailSubmitted=email submitted :
+passwdReset.details={0} Password Reset{1} - To initiate the password reset process, please provide your email address.
+passwdReset.submitRequest=Submit Password Request
+passwdReset.successSubmit.tip=If this email is associated with an account, then an email will be sent with further instructions to {0}.
+passwdReset.debug=DEBUG
+passwdReset.resetUrl=The reset URL is
+passwdReset.noEmail.tip=No email was actually sent because a user could not be found using the provided email address {0}, but we don't mention this because we don't want malicious users to use the form to determine if there is an account associated with an email address.
+passwdReset.illegalLink.tip=Your password reset link is not valid. If you need to reset your password, {0}click here{1} in order to request that your password to be reset again.
+passwdReset.newPasswd.details={0} Reset Password{1} \u2013 Our password requirements have changed. Please pick a strong password that matches the criteria below.
+passwdReset.newPasswd=New Password
+passwdReset.rePasswd=Retype Password
+passwdReset.resetBtn=Reset Password
+
+#loginpage.xhtml
+login.System=Login System
+login.forgot.text=Forgot your password?
+login.builtin=Dataverse Account
+login.institution=Institutional Account
+#login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
+#UB
+login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
+
+
+#login.institution.support.blurbwithLink=Leaving your institution? Please contact {0} for assistance.
+#UB
+login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance.
+
+
+login.builtin.credential.usernameOrEmail=Username/Email
+login.builtin.credential.password=Password
+login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account?
+login.signup.blurb=Sign up for a Dataverse account.
+login.echo.credential.name=Name
+login.echo.credential.email=Email
+login.echo.credential.affiliation=Affiliation
+# how do we exercise login.error? Via a password upgrade failure? See https://github.com/IQSS/dataverse/pull/2922
+login.error=Error validating the username, email address, or password. Please try again. If the problem persists, contact an administrator.
+user.error.cannotChangePassword=Sorry, your password cannot be changed. Please contact your system administrator.
+user.error.wrongPassword=Sorry, wrong password.
+login.button=Log In with {0}
+login.button.orcid=Create or Connect your ORCID
+# authentication providers
+auth.providers.title=Other options
+auth.providers.tip=You can convert a Dataverse account to use one of the options above. More information about account creation.
+auth.providers.title.builtin=Username/Email
+auth.providers.title.shib=Your Institution
+auth.providers.title.orcid=ORCID
+auth.providers.title.google=Google
+auth.providers.title.github=GitHub
+auth.providers.blurb=Log in or sign up with your {0} account — more information about account creation. Having trouble? Please contact {3} for assistance.
+auth.providers.persistentUserIdName.orcid=ORCID iD
+auth.providers.persistentUserIdName.github=ID
+auth.providers.persistentUserIdTooltip.orcid=ORCID provides a persistent digital identifier that distinguishes you from other researchers.
+auth.providers.persistentUserIdTooltip.github=GitHub assigns a unique number to every user.
+auth.providers.insufficientScope=Dataverse was not granted the permission to read user data from {0}.
+auth.providers.exception.userinfo=Error getting the user info record from {0}.
+auth.providers.token.failRetrieveToken=Dataverse could not retrieve an access token.
+auth.providers.token.failParseToken=Dataverse could not parse the access token.
+auth.providers.token.failGetUser=Dataverse could not get your user record. Please consult your administrator.
+auth.providers.orcid.helpmessage1=ORCID is an open, non-profit, community-based effort to provide a registry of unique researcher identifiers and a transparent method of linking research activities and outputs to these identifiers. ORCID is unique in its ability to reach across disciplines, research sectors, and national boundaries and its cooperation with other identifier systems. Find out more at orcid.org/about.
+auth.providers.orcid.helpmessage2=This repository uses your ORCID for authentication (so you don't need another username/password combination). Having your ORCID associated with your datasets also makes it easier for people to find the datasets you have published.
+
+# Friendly AuthenticationProvider names
+authenticationProvider.name.builtin=Dataverse
+authenticationProvider.name.null=(provider is unknown)
+authenticationProvider.name.github=GitHub
+authenticationProvider.name.google=Google
+authenticationProvider.name.orcid=ORCiD
+authenticationProvider.name.orcid-sandbox=ORCiD Sandbox
+authenticationProvider.name.shib=Shibboleth
+
+#confirmemail.xhtml
+confirmEmail.pageTitle=Email Verification
+confirmEmail.submitRequest=Verify Email
+confirmEmail.submitRequest.success=A verification email has been sent to {0}. Note, the verify link will expire after {1}.
+confirmEmail.details.success=Email address verified!
+confirmEmail.details.failure=We were unable to verify your email address. Please navigate to your Account Information page and click the "Verify Email" button.
+confirmEmail.details.goToAccountPageButton=Go to Account Information
+confirmEmail.notVerified=Not Verified
+confirmEmail.verified=Verified
+
+#shib.xhtml
+shib.btn.convertAccount=Convert Account
+shib.btn.createAccount=Create Account
+shib.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in?
+# Bundle file editors, please note that "shib.welcomeExistingUserMessage" is used in a unit test
+shib.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. After converting, you will only need to use your institutional log in.
+# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test
+shib.welcomeExistingUserMessageDefaultInstitution=your institution
+shib.dataverseUsername=Dataverse Username
+shib.currentDataversePassword=Current Dataverse Password
+shib.accountInformation=Account Information
+shib.offerToCreateNewAccount=This information is provided by your institution and will be used to create your Dataverse account.
+shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account.
+
+# oauth2/firstLogin.xhtml
+oauth2.btn.convertAccount=Convert Existing Account
+oauth2.btn.createAccount=Create New Account
+oauth2.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in?
+oauth2.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. After converting, you will only need to use your institutional log in.
+oauth2.welcomeExistingUserMessageDefaultInstitution=your institution
+oauth2.dataverseUsername=Dataverse Username
+oauth2.currentDataversePassword=Current Dataverse Password
+oauth2.chooseUsername=Username:
+oauth2.passwordRejected=Validation Error - Wrong username or password.
+# oauth2.newAccount.title=Account Creation
+oauth2.newAccount.welcomeWithName=Welcome to Dataverse, {0}
+oauth2.newAccount.welcomeNoName=Welcome to Dataverse
+# oauth2.newAccount.email=Email
+# oauth2.newAccount.email.tip=Dataverse uses this email to notify you of issues regarding your data.
+oauth2.newAccount.suggestedEmails=Suggested Email Addresses:
+oauth2.newAccount.username=Username
+oauth2.newAccount.username.tip=This username will be your unique identifier as a Dataverse user.
+oauth2.newAccount.explanation=This information is provided by {0} and will be used to create your {1} account. To log in again, you will have to use the {0} log in option.
+oauth2.newAccount.suggestConvertInsteadOfCreate=If you already have a {0} account, you will need to convert your account.
+# oauth2.newAccount.tabs.convertAccount=Convert Existing Account
+oauth2.newAccount.buttons.convertNewAccount=Convert Account
+oauth2.newAccount.emailTaken=Email already taken. Consider merging the corresponding account instead.
+oauth2.newAccount.emailOk=Email OK.
+oauth2.newAccount.emailInvalid=Invalid email address.
+# oauth2.newAccount.usernameTaken=Username already taken.
+# oauth2.newAccount.usernameOk=Username OK.
+
+# oauth2/convert.xhtml
+# oauth2.convertAccount.title=Account Conversion
+oauth2.convertAccount.explanation=Please enter your {0} account username or email and password to convert your account to the {1} log in option. Learn more about converting your account.
+oauth2.convertAccount.username=Existing username
+oauth2.convertAccount.password=Password
+oauth2.convertAccount.authenticationFailed=Your account can only be converted if you provide the correct username and password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account.
+oauth2.convertAccount.buttonTitle=Convert Account
+oauth2.convertAccount.success=Your Dataverse account is now associated with your {0} account.
+oauth2.convertAccount.failedDeactivated=Your existing account cannot be converted because it has been deactivated.
+
+# oauth2/callback.xhtml
+oauth2.callback.page.title=OAuth Callback
+oauth2.callback.message=Authentication Error - Dataverse could not authenticate your login with the provider that you selected. Please make sure you authorize your account to connect with Dataverse. For more details about the information being requested, see the User Guide.
+
+# deactivated user accounts
+deactivated.error=Sorry, your account has been deactivated.
+
+# tab on dataverseuser.xhtml
+apitoken.title=API Token
+apitoken.message=Your API Token is valid for a year. Check out our {0}API Guide{1} for more information on using your API Token with the Dataverse APIs.
+apitoken.notFound=API Token for {0} has not been created.
+apitoken.expired.warning=This token is about to expire, please generate a new one.
+apitoken.expired.error=This token is expired, please generate a new one.
+apitoken.generateBtn=Create Token
+apitoken.regenerateBtn=Recreate Token
+apitoken.revokeBtn=Revoke Token
+apitoken.expirationDate.label=Expiration Date
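+# Example of passing the token described above to the native API (SERVER_URL and API_TOKEN are
+# placeholder environment variables):
+#   curl -H "X-Dataverse-key: $API_TOKEN" "$SERVER_URL/api/users/:me"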
+
+#dashboard.xhtml
+dashboard.title=Dashboard
+dashboard.card.harvestingclients.header=Harvesting Clients
+dashboard.card.harvestingclients.btn.manage=Manage Clients
+dashboard.card.harvestingclients.clients={0, choice, 0#Clients|1#Client|2#Clients}
+dashboard.card.harvestingclients.datasets={0, choice, 0#Datasets|1#Dataset|2#Datasets}
+dashboard.card.harvestingserver.header=Harvesting Server
+dashboard.card.harvestingserver.enabled=OAI server enabled
+dashboard.card.harvestingserver.disabled=OAI server disabled
+dashboard.card.harvestingserver.status=Status
+dashboard.card.harvestingserver.sets={0, choice, 0#Sets|1#Set|2#Sets}
+dashboard.card.harvestingserver.btn.manage=Manage Server
+dashboard.card.metadataexport.header=Metadata Export
+dashboard.card.metadataexport.message=Dataset metadata export is only available through the {0} API. Learn more in the {0} {1}API Guide{2}.
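+# A sketch of the export API the message above points to (the persistentId is a placeholder):
+#   curl "$SERVER_URL/api/datasets/export?exporter=dataverse_json&persistentId=doi:10.5072/FK2/EXAMPLE"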
+
+#harvestclients.xhtml
+harvestclients.title=Manage Harvesting Clients
+harvestclients.toptip=Harvesting can be scheduled to run at a specific time or on demand. Harvesting can be initiated here or via the REST API.
+harvestclients.noClients.label=No clients are configured.
+harvestclients.noClients.why.header=What is Harvesting?
+harvestclients.noClients.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting client, your Dataverse gathers metadata records from remote sources. These can be other Dataverse instances, or other archives that support OAI-PMH, the standard harvesting protocol.
+harvestclients.noClients.why.reason2=Harvested metadata records are searchable by users. Clicking on a harvested dataset in the search results takes the user to the original repository. Harvested datasets cannot be edited in your Dataverse installation.
+harvestclients.noClients.how.header=How To Use Harvesting
+harvestclients.noClients.how.tip1=To harvest metadata, a Harvesting Client is created and configured for each remote repository. Note that when creating a client you will need to select an existing local dataverse to host harvested datasets.
+harvestclients.noClients.how.tip2=Harvested records can be kept in sync with the original repository through scheduled incremental updates, for example, daily or weekly. Alternatively, harvests can be run on demand, from this page or via the REST API.
+harvestclients.noClients.getStarted=To get started, click on the Add Client button above. To learn more about Harvesting, visit the Harvesting section of the User Guide.
+harvestclients.btn.add=Add Client
+harvestclients.tab.header.name=Nickname
+harvestclients.tab.header.url=URL
+harvestclients.tab.header.lastrun=Last Run
+harvestclients.tab.header.lastresults=Last Results
+harvestclients.tab.header.action=Actions
+harvestclients.tab.header.action.btn.run=Run Harvesting
+harvestclients.tab.header.action.btn.edit=Edit
+harvestclients.tab.header.action.btn.delete=Delete
+harvestclients.tab.header.action.btn.delete.dialog.header=Delete Harvesting Client
+harvestclients.tab.header.action.btn.delete.dialog.warning=Are you sure you want to delete the harvesting client "{0}"? Deleting the client will delete all datasets harvested from this remote server.
+harvestclients.tab.header.action.btn.delete.dialog.tip=Note, this action may take a while to process, depending on the number of harvested datasets.
+harvestclients.tab.header.action.delete.infomessage=Harvesting client is being deleted. Note that this may take a while, depending on the amount of harvested content.
+harvestclients.actions.runharvest.success=Successfully started an asynchronous harvest for client "{0}". Please reload the page to check on the harvest results.
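+# A harvest run can also be triggered over the API, as this message implies; the endpoint path is
+# recalled from the Admin Guide and may differ by Dataverse version (nickname and variables are
+# placeholders):
+#   curl -H "X-Dataverse-key: $API_TOKEN" -X POST "$SERVER_URL/api/harvest/clients/mynickname/run"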
+harvestclients.newClientDialog.step1=Step 1 of 4 - Client Information
+harvestclients.newClientDialog.title.new=Create Harvesting Client
+harvestclients.newClientDialog.help=Configure a client to harvest content from a remote server.
+harvestclients.newClientDialog.nickname=Nickname
+harvestclients.newClientDialog.nickname.helptext=Consists of letters, digits, underscores (_) and dashes (-).
+harvestclients.newClientDialog.nickname.required=Client nickname cannot be empty!
+harvestclients.newClientDialog.nickname.invalid=Client nickname can contain only letters, digits, underscores (_) and dashes (-); and must be at most 30 characters.
+harvestclients.newClientDialog.nickname.alreadyused=This nickname is already used.
+harvestclients.newClientDialog.type=Server Protocol
+harvestclients.newClientDialog.type.helptext=Only the OAI server protocol is currently supported.
+harvestclients.newClientDialog.type.OAI=OAI
+harvestclients.newClientDialog.type.Nesstar=Nesstar
+harvestclients.newClientDialog.url=Server URL
+harvestclients.newClientDialog.url.tip=URL of a harvesting resource.
+harvestclients.newClientDialog.url.watermark=Remote harvesting server, http://...
+harvestclients.newClientDialog.url.helptext.notvalidated=URL of a harvesting resource. Once you click 'Next', we will try to establish a connection to the server in order to verify that it is working, and to obtain extra information about its capabilities.
+harvestclients.newClientDialog.url.required=A valid harvesting server address is required.
+harvestclients.newClientDialog.url.invalid=Invalid URL. Failed to establish connection and receive a valid server response.
+harvestclients.newClientDialog.url.noresponse=Failed to establish connection to the server.
+harvestclients.newClientDialog.url.badresponse=Invalid response from the server.
+harvestclients.newClientDialog.dataverse=Local Dataverse
+harvestclients.newClientDialog.dataverse.tip=Dataverse that will host the datasets harvested from this remote resource.
+harvestclients.newClientDialog.dataverse.menu.enterName=Enter Dataverse Alias
+harvestclients.newClientDialog.dataverse.menu.header=Dataverse Name (Affiliate), Alias
+harvestclients.newClientDialog.dataverse.menu.invalidMsg=No matches found
+harvestclients.newClientDialog.dataverse.required=You must select an existing dataverse for this harvesting client.
+harvestclients.newClientDialog.step2=Step 2 of 4 - Format
+harvestclients.newClientDialog.oaiSets=OAI Set
+harvestclients.newClientDialog.oaiSets.tip=Harvesting sets offered by this OAI server.
+harvestclients.newClientDialog.oaiSets.noset=None
+harvestclients.newClientDialog.oaiSets.helptext=Selecting "none" will harvest the default set, as defined by the server. Often this will be the entire body of content across all sub-sets.
+harvestclients.newClientDialog.oaiSets.helptext.noset=This OAI server does not support named sets. The entire body of content offered by the server will be harvested.
+harvestclients.newClientDialog.oaiSets.listTruncated=Please note that the remote server took too long to return the full list of available OAI sets, so the list was truncated. Select a set from the current list (or choose the "no set" option) and try again later if you need to change it.
+harvestclients.newClientDialog.oaiMetadataFormat=Metadata Format
+harvestclients.newClientDialog.oaiMetadataFormat.tip=Metadata formats offered by the remote server.
+harvestclients.newClientDialog.oaiMetadataFormat.required=Please select the metadata format to harvest from this archive.
+harvestclients.newClientDialog.step3=Step 3 of 4 - Schedule
+harvestclients.newClientDialog.schedule=Schedule
+harvestclients.newClientDialog.schedule.tip=Schedule harvesting to run automatically daily or weekly.
+harvestclients.newClientDialog.schedule.time.none.helptext=Leave harvesting unscheduled to run on demand only.
+harvestclients.newClientDialog.schedule.none=None
+harvestclients.newClientDialog.schedule.daily=Daily
+harvestclients.newClientDialog.schedule.weekly=Weekly
+harvestclients.newClientDialog.schedule.time=Time
+harvestclients.newClientDialog.schedule.day=Day
+harvestclients.newClientDialog.schedule.time.am=AM
+harvestclients.newClientDialog.schedule.time.pm=PM
+harvestclients.newClientDialog.schedule.time.helptext=Scheduled times are in your local time.
+harvestclients.newClientDialog.btn.create=Create Client
+harvestclients.newClientDialog.success=Successfully created harvesting client "{0}".
+harvestclients.newClientDialog.step4=Step 4 of 4 - Display
+harvestclients.newClientDialog.harvestingStyle=Archive Type
+harvestclients.newClientDialog.harvestingStyle.tip=Type of remote archive.
+harvestclients.newClientDialog.harvestingStyle.helptext=Select the archive type that best describes this remote server in order to properly apply formatting rules and styles to the harvested metadata as they are shown in the search results. Note that improperly selecting the type of the remote archive can result in incomplete entries in the search results, and a failure to redirect the user to the archival source of the data.
+harvestclients.newClientDialog.harvestingStyle.required=Please select one of the values from the menu.
+harvestclients.viewEditDialog.title=Edit Harvesting Client
+harvestclients.viewEditDialog.archiveUrl=Archive URL
+harvestclients.viewEditDialog.archiveUrl.tip=The URL of the archive that serves the data harvested by this client, which is used in search results for links to the original sources of the harvested content.
+harvestclients.viewEditDialog.archiveUrl.helptext=Edit if this URL differs from the Server URL.
+harvestclients.viewEditDialog.archiveDescription=Archive Description
+harvestclients.viewEditDialog.archiveDescription.tip=Description of the archival source of the harvested content, displayed in search results.
+harvestclients.viewEditDialog.archiveDescription.default.generic=This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data.
+harvestclients.viewEditDialog.btn.save=Save Changes
+harvestclients.newClientDialog.title.edit=Edit Group {0}
+
+#harvestset.xhtml
+harvestserver.title=Manage Harvesting Server
+harvestserver.toptip=Define sets of local datasets that will be available for harvesting by remote clients.
+harvestserver.service.label=OAI Server
+harvestserver.service.enabled=Enabled
+harvestserver.service.disabled=Disabled
+harvestserver.service.disabled.msg=Harvesting Server is currently disabled.
+harvestserver.service.empty=No sets are configured.
+harvestserver.service.enable.success=OAI Service has been successfully enabled.
+harvestserver.noSets.why.header=What is a Harvesting Server?
+harvestserver.noSets.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting server, your Dataverse can make some of the local dataset metadata available to remote harvesting clients. These can be other Dataverse instances, or any other clients that support the OAI-PMH harvesting protocol.
+harvestserver.noSets.why.reason2=Only the published, unrestricted datasets in your Dataverse can be harvested. Remote clients normally keep their records in sync through scheduled incremental updates, daily or weekly, thus minimizing the load on your server. Note that only the metadata are harvested; remote harvesters will generally not attempt to download the data files themselves.
+harvestserver.noSets.how.header=How to run a Harvesting Server?
+harvestserver.noSets.how.tip1=The harvesting server can be enabled or disabled on this page.
+harvestserver.noSets.how.tip2=Once the service is enabled, you can define collections of local datasets that will be available to remote harvesters as OAI Sets. Sets are defined by search queries (for example, authorName:king; or parentId:1234 - to select all the datasets that belong to the dataverse specified; or dsPersistentId:"doi:1234/" to select all the datasets with the persistent identifier authority specified). Consult the Search API section of the Dataverse User Guide for more information on the search queries.
+harvestserver.noSets.getStarted=To get started, enable the OAI server and click on the Add Set button. To learn more about Harvesting, visit the Harvesting section of the User Guide.
+harvestserver.btn.add=Add Set
+harvestserver.tab.header.spec=OAI setSpec
+harvestserver.tab.col.spec.default=DEFAULT
+harvestserver.tab.header.description=Description
+harvestserver.tab.header.definition=Definition Query
+harvestserver.tab.col.definition.default=All Published Local Datasets
+harvestserver.tab.header.stats=Datasets
+harvestserver.tab.col.stats.empty=No records (empty set)
+harvestserver.tab.col.stats.results={0} {0, choice, 0#datasets|1#dataset|2#datasets} ({1} {1, choice, 0#records|1#record|2#records} exported, {2} marked as deleted)
+harvestserver.tab.header.action=Actions
+harvestserver.tab.header.action.btn.export=Run Export
+harvestserver.actions.runreexport.success=Successfully started an asynchronous re-export job for OAI set "{0}" (please reload the page to check on the export progress).
+harvestserver.tab.header.action.btn.edit=Edit
+harvestserver.tab.header.action.btn.delete=Delete
+harvestserver.tab.header.action.btn.delete.dialog.header=Delete Harvesting Set
+harvestserver.tab.header.action.btn.delete.dialog.tip=Are you sure you want to delete the OAI set "{0}"? You cannot undo a delete!
+harvestserver.tab.header.action.delete.infomessage=The selected harvesting set is being deleted. (This may take a few moments.)
+harvestserver.newSetDialog.title.new=Create Harvesting Set
+harvestserver.newSetDialog.help=Define a set of local datasets available for harvesting to remote clients.
+harvestserver.newSetDialog.setspec=Name/OAI setSpec
+harvestserver.newSetDialog.setspec.tip=A unique name (OAI setSpec) identifying this set.
+harvestserver.newSetDialog.setspec.helptext=Consists of letters, digits, underscores (_) and dashes (-).
+harvestserver.editSetDialog.setspec.helptext=The name cannot be changed once the set has been created.
+harvestserver.editSetDialog.setspec.helptext.default=this is the default, unnamed set
+harvestserver.newSetDialog.setspec.required=Name (OAI setSpec) cannot be empty!
+harvestserver.newSetDialog.setspec.invalid=Name (OAI setSpec) can contain only letters, digits, underscores (_) and dashes (-).
+harvestserver.newSetDialog.setspec.alreadyused=This set name (OAI setSpec) is already used.
+harvestserver.newSetDialog.setspec.sizelimit=This set name (OAI setSpec) may be no longer than 30 characters.
+harvestserver.newSetDialog.setspec.superUser.required=Only superusers may create OAI sets.
+harvestserver.newSetDialog.setdescription=Description
+harvestserver.newSetDialog.setdescription.tip=Provide a brief description for this OAI set.
+harvestserver.newSetDialog.setdescription.required=Set description cannot be empty!
+harvestserver.newSetDialog.setdescription.default=The default, "no name" set. The OAI server will serve the records from this set when no "setspec" argument is specified by the client.
+harvestserver.newSetDialog.setquery=Definition Query
+harvestserver.newSetDialog.setquery.tip=Search query that defines the content of this OAI set.
+harvestserver.newSetDialog.setquery.helptext=Example query: authorName:king
+harvestserver.newSetDialog.setquery.required=Search query cannot be left empty!
+harvestserver.newSetDialog.setquery.results=Search query returned {0} datasets!
+harvestserver.newSetDialog.setquery.empty=WARNING: Search query returned no results!
+harvestserver.newSetDialog.btn.create=Create Set
+harvestserver.newSetDialog.success=Successfully created harvesting set "{0}".
+harvestserver.viewEditDialog.title=Edit Harvesting Set
+harvestserver.viewEditDialog.btn.save=Save Changes
+
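The OAI set strings above describe content that remote clients retrieve over plain OAI-PMH. As context, here is a minimal sketch of such a harvest request against a Dataverse OAI server; the verbs and parameters (ListSets, ListRecords, set, metadataPrefix) are standard OAI-PMH, while the host name, the /oai path, and the set name "mySet" are assumptions for illustration only.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class OaiHarvestSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String server = "https://dataverse.example.edu/oai"; // hypothetical installation

        // List the OAI sets the server advertises (what "Add Set" above creates).
        HttpRequest listSets = HttpRequest.newBuilder(
                URI.create(server + "?verb=ListSets")).GET().build();
        System.out.println(client.send(listSets, HttpResponse.BodyHandlers.ofString()).body());

        // Harvest the records of one set as Dublin Core metadata.
        HttpRequest listRecords = HttpRequest.newBuilder(
                URI.create(server + "?verb=ListRecords&set=mySet&metadataPrefix=oai_dc"))
                .GET().build();
        System.out.println(client.send(listRecords, HttpResponse.BodyHandlers.ofString()).body());
    }
}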
+#dashboard-users.xhtml
+dashboard.card.users=Users
+dashboard.card.users.header=Dashboard - User List
+dashboard.card.users.super=Superusers
+dashboard.card.users.manage=Manage Users
+dashboard.card.users.message=List and manage users.
+dashboard.list_users.searchTerm.watermark=Search these users...
+dashboard.list_users.tbl_header.userId=ID
+dashboard.list_users.tbl_header.userIdAZ=ID (A-Z)
+dashboard.list_users.tbl_header.userIdZA=ID (Z-A)
+dashboard.list_users.tbl_header.userIdentifier=Username
+dashboard.list_users.tbl_header.userIdentifierAZ=Username (A-Z)
+dashboard.list_users.tbl_header.userIdentifierZA=Username (Z-A)
+dashboard.list_users.tbl_header.name=Name
+dashboard.list_users.tbl_header.lastName=Last Name
+dashboard.list_users.tbl_header.lastNameAZ=Last Name (A-Z)
+dashboard.list_users.tbl_header.lastNameZA=Last Name (Z-A)
+dashboard.list_users.tbl_header.firstName=First Name
+dashboard.list_users.tbl_header.email=Email
+dashboard.list_users.tbl_header.emailAZ=Email (A-Z)
+dashboard.list_users.tbl_header.emailZA=Email (Z-A)
+dashboard.list_users.tbl_header.affiliation=Affiliation
+dashboard.list_users.tbl_header.affiliationAZ=Affiliation (A-Z)
+dashboard.list_users.tbl_header.affiliationZA=Affiliation (Z-A)
+dashboard.list_users.tbl_header.roles=Roles
+dashboard.list_users.tbl_header.position=Position
+dashboard.list_users.tbl_header.isSuperuser=Superuser
+dashboard.list_users.tbl_header.superuserAZ=Superuser (A-Z)
+dashboard.list_users.tbl_header.superuserZA=Superuser (Z-A)
+dashboard.list_users.tbl_header.authProviderFactoryAlias=Authentication
+dashboard.list_users.tbl_header.authProviderFactoryAliasAZ=Authentication (A-Z)
+dashboard.list_users.tbl_header.authProviderFactoryAliasZA=Authentication (Z-A)
+dashboard.list_users.tbl_header.createdTime=Created Time
+dashboard.list_users.tbl_header.lastLoginTime=Last Login Time
+dashboard.list_users.tbl_header.lastApiUseTime=Last API Use Time
+dashboard.list_users.tbl_header.deactivated=Deactivated
+dashboard.list_users.tbl_header.roles.removeAll=Remove All
+dashboard.list_users.tbl_header.roles.removeAll.header=Remove All Roles
+dashboard.list_users.tbl_header.roles.removeAll.confirmationText=Are you sure you want to remove all roles for user {0}?
+dashboard.list_users.removeAll.message.success=All roles have been removed for user {0}.
+dashboard.list_users.removeAll.message.failure=Failed to remove roles for user {0}.
+dashboard.list_users.toggleSuperuser=Edit Superuser Status
+dashboard.list_users.toggleSuperuser.confirmationText.add=Are you sure you want to enable superuser status for user {0}?
+dashboard.list_users.toggleSuperuser.confirmationText.remove=Are you sure you want to disable superuser status for user {0}?
+dashboard.list_users.api.auth.invalid_apikey=The API key is invalid.
+dashboard.list_users.api.auth.not_superuser=Forbidden. You must be a superuser.
+
+#dashboard-datamove.xhtml
+dashboard.card.datamove=Data
+dashboard.card.datamove.header=Dashboard - Move Data
+dashboard.card.datamove.manage=Move Data
+dashboard.card.datamove.message=Manage and curate your installation by moving datasets from one host dataverse to another. See also Managing Datasets and Dataverses in the Admin Guide.
+dashboard.card.datamove.selectdataset.header=Dataset to Move
+dashboard.card.datamove.newdataverse.header=New Host Dataverse
+dashboard.card.datamove.dataset.label=Dataset
+dashboard.card.datamove.dataverse.label=Dataverse
+dashboard.card.datamove.confirm.dialog=Are you sure you want to move this dataset?
+dashboard.card.datamove.confirm.yes=Yes, Move Data
+dashboard.card.datamove.message.success=The dataset "{0}" ({1}) has been successfully moved to {2}.
+dashboard.card.datamove.message.failure.summary=Failed to move dataset
+dashboard.card.datamove.message.failure.details=The dataset "{0}" ({1}) could not be moved to {2}. {3}{4}
+dashboard.card.datamove.dataverse.placeholder=Enter Dataverse Identifier...
+dashboard.card.datamove.dataverse.menu.header=Dataverse Name (Affiliate), Identifier
+dashboard.card.datamove.dataverse.menu.invalidMsg=No matches found
+dashboard.card.datamove.dataset.placeholder=Enter Dataset Persistent ID, doi:...
+dashboard.card.datamove.dataset.menu.header=Dataset Persistent ID, Title, Host Dataverse Identifier
+dashboard.card.datamove.dataset.menu.invalidMsg=No matches found
+dashboard.card.datamove.dataset.command.error.targetDataverseUnpublishedDatasetPublished=A published dataset may not be moved to an unpublished dataverse. You can retry the move after publishing {0}.
+dashboard.card.datamove.dataset.command.error.targetDataverseSameAsOriginalDataverse=This dataset is already in this dataverse.
+dashboard.card.datamove.dataset.command.error.unforced.datasetGuestbookNotInTargetDataverse=The guestbook would be removed from this dataset if you moved it because the guestbook is not in the new host dataverse.
+dashboard.card.datamove.dataset.command.error.unforced.linkedToTargetDataverseOrOneOfItsParents=This dataset is linked to the new host dataverse or one of its parents. This move would remove the link to this dataset.
+dashboard.card.datamove.dataset.command.error.unforced.suggestForce=Forcing this move is currently only available via API. Please see "Move a Dataset" under Managing Datasets and Dataverses in the Admin Guide for details.
+dashboard.card.datamove.dataset.command.error.indexingProblem=Dataset could not be moved. Indexing failed.
+
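The dashboard.card.datamove.* strings above surface the dataset move operation, and dashboard.card.datamove.dataset.command.error.unforced.suggestForce notes that a forced move is only available via the API. A rough sketch of that call follows, assuming a POST /api/datasets/{id}/move/{targetAlias} endpoint with a forceMove query parameter (verify both against the Admin Guide); the server, dataset id, alias, and token are hypothetical.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class MoveDatasetSketch {
    public static void main(String[] args) throws Exception {
        String server = "https://dataverse.example.edu";            // hypothetical installation
        String datasetId = "42";                                    // hypothetical database id
        String targetAlias = "new-host-dataverse";                  // hypothetical dataverse alias
        String apiToken = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx";   // superuser API token

        // forceMove=true (assumed parameter name) overrides the guestbook/linking
        // warnings described in the UI strings above.
        HttpRequest request = HttpRequest.newBuilder(
                URI.create(server + "/api/datasets/" + datasetId
                        + "/move/" + targetAlias + "?forceMove=true"))
                .header("X-Dataverse-key", apiToken)
                .POST(HttpRequest.BodyPublishers.noBody())
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}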
+#MailServiceBean.java
+notification.email.create.dataverse.subject={0}: Your dataverse has been created
+notification.email.create.dataset.subject={0}: Your dataset has been created
+notification.email.request.file.access.subject={0}: Access has been requested for a restricted file
+notification.email.grant.file.access.subject={0}: You have been granted access to a restricted file
+notification.email.rejected.file.access.subject={0}: Your request for access to a restricted file has been rejected
+notification.email.submit.dataset.subject={0}: Your dataset has been submitted for review
+notification.email.publish.dataset.subject={0}: Your dataset has been published
+notification.email.publishFailure.dataset.subject={0}: Failed to publish your dataset
+notification.email.returned.dataset.subject={0}: Your dataset has been returned
+notification.email.workflow.success.subject={0}: Your dataset has been processed
+notification.email.workflow.success=A workflow running on {0} (view at {1}) succeeded: {2}
+notification.email.workflow.failure.subject={0}: Failed to process your dataset
+notification.email.workflow.failure=A workflow running on {0} (view at {1}) failed: {2}
+notification.email.workflow.nullMessage=No additional message sent from the workflow.
+notification.email.create.account.subject={0}: Your account has been created
+notification.email.assign.role.subject={0}: You have been assigned a role
+notification.email.revoke.role.subject={0}: Your role has been revoked
+notification.email.verifyEmail.subject={0}: Verify your email address
+notification.email.ingestCompleted.subject={0}: Your ingest has successfully finished!
+notification.email.ingestCompletedWithErrors.subject={0}: Your ingest has finished with errors!
+notification.email.greeting=Hello, \n
+notification.email.greeting.html=Hello,
+# Bundle file editors, please note that "notification.email.welcome" is used in a unit test
+notification.email.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the User Guide at {1}/{2}/user or contact {3} at {4} for assistance.
+notification.email.welcomeConfirmEmailAddOn=\n\nPlease verify your email address at {0} . Note, the verify link will expire after {1}. Send another verification email by visiting your account page.
+notification.email.requestFileAccess=File access requested for dataset: {0} by {1} ({2}). Manage permissions at {3}.
+notification.email.grantFileAccess=Access granted for files in dataset: {0} (view at {1}).
+notification.email.rejectFileAccess=Your request for access was rejected for the requested files in the dataset: {0} (view at {1}). If you have any questions about why your request was rejected, you may reach the dataset owner using the "Contact" link on the upper right corner of the dataset page.
+# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test
+notification.email.createDataverse=Your new dataverse named {0} (view at {1} ) was created in {2} (view at {3} ). To learn more about what you can do with your dataverse, check out the Dataverse Management - User Guide at {4}/{5}/user/dataverse-management.html .
+# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test
+notification.email.createDataset=Your new dataset named {0} (view at {1} ) was created in {2} (view at {3} ). To learn more about what you can do with a dataset, check out the Dataset Management - User Guide at {4}/{5}/user/dataset-management.html .
+notification.email.wasSubmittedForReview={0} (view at {1}) was submitted for review to be published in {2} (view at {3}). Don''t forget to publish it or send it back to the contributor, {4} ({5})\!
+notification.email.wasReturnedByReviewer={0} (view at {1}) was returned by the curator of {2} (view at {3}).
+notification.email.wasPublished={0} (view at {1}) was published in {2} (view at {3}).
+notification.email.publishFailedPidReg={0} (view at {1}) in {2} (view at {3}) could not be published due to a failure to register or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen.
+#notification.email.closing=\n\nYou may contact us for support at {0}.\n\nThank you,\n{1}
+#UB
+notification.email.closing=\n\nYou may contact us for support at https://site.uit.no/dataverseno/support/ .\n\nThank you,\n{1}
+
+notification.email.closing.html=<br/><br/>You may contact us for support at {0}.<br/><br/>Thank you,<br/>{1}
+notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}).
+notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}).
+notification.email.changeEmail=Hello, {0}.{1}\n\nPlease contact us if you did not intend this change or if you need assistance.
+notification.email.passwordReset=Hi {0},\n\nSomeone, hopefully you, requested a password reset for {1}.\n\nPlease click the link below to reset your Dataverse account password:\n\n {2} \n\n The link above will only work for the next {3} minutes.\n\n Please contact us if you did not request this password reset or need further help.
+notification.email.passwordReset.subject=Dataverse Password Reset Requested
+hours=hours
+hour=hour
+minutes=minutes
+minute=minute
+notification.email.checksumfail.subject={0}: Your upload failed checksum validation
+notification.email.import.filesystem.subject=Dataset {0} has been successfully uploaded and verified
+notification.email.import.checksum.subject={0}: Your file checksum job has completed
+contact.delegation={0} on behalf of {1}
+contact.delegation.default_personal=Dataverse Installation Admin
+notification.email.info.unavailable=Unavailable
+notification.email.apiTokenGenerated=Hello {0} {1},\n\nYour API token has been generated. Please keep it secure, as you would a password.
+notification.email.apiTokenGenerated.subject=API Token was generated
+
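The notification values above are java.text.MessageFormat patterns: {0}, {1}, ... are positional arguments, {n, choice, ...} (as in harvestserver.tab.col.stats.results) selects the singular or plural variant, and a literal apostrophe must be doubled, which is why notification.email.wasSubmittedForReview spells Don''t. A minimal sketch of how such keys are formatted, assuming the file is simply on the classpath as Bundle.properties (Dataverse itself resolves keys through its own bundle helper); the argument values are made up for illustration.

import java.text.MessageFormat;
import java.util.Locale;
import java.util.ResourceBundle;

public class BundleFormatSketch {
    public static void main(String[] args) {
        // Assumes this properties file is available as Bundle.properties on the classpath.
        ResourceBundle bundle = ResourceBundle.getBundle("Bundle", Locale.ENGLISH);

        // {n, choice, ...} picks the variant based on the numeric argument.
        String statsPattern = bundle.getString("harvestserver.tab.col.stats.results");
        System.out.println(MessageFormat.format(statsPattern, 1, 1, 0));
        // -> 1 dataset (1 record exported, 0 marked as deleted)

        // Doubled apostrophes ('') come out as a single literal apostrophe.
        String reviewPattern = bundle.getString("notification.email.wasSubmittedForReview");
        System.out.println(MessageFormat.format(reviewPattern,
                "My Dataset", "https://example.edu/ds", "Root", "https://example.edu/dv",
                "Jane Doe", "jane@example.edu"));
    }
}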
+# dataverse.xhtml
+dataverse.name=Dataverse Name
+dataverse.name.title=The project, department, university, professor, or journal this dataverse will contain data for.
+dataverse.enterName=Enter name...
+dataverse.host.title=The dataverse which contains this data.
+dataverse.host.tip=Changing the host dataverse will clear any fields you may have entered data into.
+dataverse.host.autocomplete.nomatches=No matches
+dataverse.identifier.title=Short name used for the URL of this dataverse.
+dataverse.affiliation.title=The organization with which this dataverse is affiliated.
+dataverse.storage.title=A storage service to be used for datasets in this dataverse.
+dataverse.category=Category
+dataverse.category.title=The type that most closely reflects this dataverse.
+dataverse.type.selectTab.top=Select one...
+dataverse.type.selectTab.researchers=Researcher
+dataverse.type.selectTab.researchProjects=Research Project
+dataverse.type.selectTab.journals=Journal
+dataverse.type.selectTab.organizationsAndInsitutions=Organization or Institution
+dataverse.type.selectTab.teachingCourses=Teaching Course
+dataverse.type.selectTab.uncategorized=Uncategorized
+dataverse.type.selectTab.researchGroup=Research Group
+dataverse.type.selectTab.laboratory=Laboratory
+dataverse.type.selectTab.department=Department
+dataverse.description.title=A summary describing the purpose, nature, or scope of this dataverse.
+dataverse.email=Email
+dataverse.email.title=The e-mail address(es) of the contact(s) for the dataverse.
+dataverse.share.dataverseShare=Share Dataverse
+dataverse.share.dataverseShare.tip=Share this dataverse on your favorite social media networks.
+dataverse.share.dataverseShare.shareText=View this dataverse.
+dataverse.subject.title=Subject(s) covered in this dataverse.
+dataverse.metadataElements=Metadata Fields
+dataverse.metadataElements.tip=Choose the metadata fields to use in dataset templates and when adding a dataset to this dataverse.
+dataverse.metadataElements.from.tip=Use metadata fields from {0}
+dataverse.resetModifications=Reset Modifications
+dataverse.resetModifications.text=Are you sure you want to reset the selected metadata fields? If you do this, any customizations (hidden, required, optional) you have done will no longer appear.
+dataverse.field.required=(Required)
+dataverse.field.example1= (Examples:
+dataverse.field.example2=)
+dataverse.field.set.tip=[+] View fields + set as hidden, required, or optional
+dataverse.field.set.view=[+] View fields
+dataverse.field.requiredByDataverse=Required by Dataverse
+dataverse.facetPickList.text=Browse/Search Facets
+dataverse.facetPickList.tip=Choose the metadata fields to use as facets for browsing datasets and dataverses in this dataverse.
+dataverse.facetPickList.facetsFromHost.text=Use browse/search facets from {0}
+dataverse.facetPickList.metadataBlockList.all=All Metadata Fields
+dataverse.edit=Edit
+dataverse.option.generalInfo=General Information
+dataverse.option.themeAndWidgets=Theme + Widgets
+dataverse.option.featuredDataverse=Featured Dataverses
+dataverse.option.permissions=Permissions
+dataverse.option.dataverseGroups=Groups
+dataverse.option.datasetTemplates=Dataset Templates
+dataverse.option.datasetGuestbooks=Dataset Guestbooks
+dataverse.option.deleteDataverse=Delete Dataverse
+dataverse.publish.btn=Publish
+dataverse.publish.header=Publish Dataverse
+dataverse.nopublished=No Published Dataverses
+dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse.
+dataverse.contact=Email Dataverse Contact
+dataverse.link=Link Dataverse
+dataverse.link.btn.tip=Link to Your Dataverse
+dataverse.link.yourDataverses=Your Dataverse
+dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name
+dataverse.link.save=Save Linked Dataverse
+dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to.
+dataverse.link.dataset.choose=Enter the name of the dataverse you would like to link this dataset to. If you need to remove this link in the future, please contact {0}.
+dataverse.link.dataset.none=No linkable dataverses available.
+dataverse.link.no.choice=You have one dataverse in which you can add linked dataverses and datasets.
+dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. Create a dataverse to get started.
+dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses.
+dataverse.savedsearch.link=Link Search
+dataverse.savedsearch.searchquery=Search
+dataverse.savedsearch.filterQueries=Facets
+dataverse.savedsearch.save=Save Linked Search
+dataverse.savedsearch.dataverse.choose=Choose which of your dataverses you would like to link this search to.
+dataverse.savedsearch.no.choice=You have one dataverse to which you may add a saved search.
+# Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test
+dataverse.saved.search.success=The saved search has been successfully linked to {0}.
+dataverse.saved.search.failure=The saved search was not able to be linked.
+dataverse.linked.success= {0} has been successfully linked to {1}.
+dataverse.linked.success.wait= {0} has been successfully linked to {1}. Please wait for its contents to appear.
+dataverse.linked.internalerror={0} has been successfully linked to {1} but contents will not appear until an internal error has been fixed.
+dataverse.linked.error.alreadyLinked={0} has already been linked to {1}.
+dataverse.page.pre=Previous
+dataverse.page.next=Next
+dataverse.byCategory=Dataverses by Category
+dataverse.displayFeatured=Display the dataverses selected below on the landing page of this dataverse.
+dataverse.selectToFeature=Select dataverses to feature on the landing page of this dataverse.
+dataverse.publish.tip=Are you sure you want to publish your dataverse? Once you do so it must remain published.
+dataverse.publish.failed.tip=This dataverse cannot be published because the dataverse it is in has not been published.
+dataverse.publish.failed=Cannot publish dataverse.
+dataverse.publish.success=Your dataverse is now public.
+dataverse.publish.failure=This dataverse was not able to be published.
+dataverse.delete.tip=Are you sure you want to delete your dataverse? You cannot undelete this dataverse.
+dataverse.delete=Delete Dataverse
+dataverse.delete.success=Your dataverse has been deleted.
+dataverse.delete.failure=This dataverse was not able to be deleted.
+# Bundle file editors, please note that "dataverse.create.success" is used in a unit test because it's so fancy with two parameters
+dataverse.create.success=You have successfully created your dataverse! To learn more about what you can do with your dataverse, check out the User Guide.
+dataverse.create.failure=This dataverse was not able to be created.
+dataverse.create.authenticatedUsersOnly=Only authenticated users can create dataverses.
+dataverse.update.success=You have successfully updated your dataverse!
+dataverse.update.failure=This dataverse was not able to be updated.
+dataverse.selected=Selected
+dataverse.listing.error=Fatal error trying to list the contents of the dataverse. Please report this error to the Dataverse administrator.
+dataverse.datasize=Total size of the files stored in this dataverse: {0} bytes
+dataverse.datasize.ioerror=Fatal IO error while trying to determine the total size of the files stored in the dataverse. Please report this error to the Dataverse administrator.
+dataverse.storage.inherited=(inherited from enclosing Dataverse)
+dataverse.storage.default=(Default)
+# rolesAndPermissionsFragment.xhtml
+
+# advanced.xhtml
+advanced.search.header.dataverses=Dataverses
+advanced.search.dataverses.name.tip=The project, department, university, professor, or journal this Dataverse will contain data for.
+advanced.search.dataverses.affiliation.tip=The organization with which this Dataverse is affiliated.
+advanced.search.dataverses.description.tip=A summary describing the purpose, nature, or scope of this Dataverse.
+advanced.search.dataverses.subject.tip=Domain-specific Subject Categories that are topically relevant to this Dataverse.
+advanced.search.header.datasets=Datasets
+advanced.search.header.files=Files
+advanced.search.files.name.tip=The name given to identify the file.
+advanced.search.files.description.tip=A summary describing the file and its variables.
+advanced.search.files.persistentId.tip=The persistent identifier for the file.
+advanced.search.files.persistentId=Data File Persistent ID
+advanced.search.files.persistentId.tip=The unique persistent identifier for a data file, which can be a Handle or DOI in Dataverse.
+advanced.search.files.fileType=File Type
+advanced.search.files.fileType.tip=The file type, e.g. Comma Separated Values, Plain Text, R, etc.
+advanced.search.files.variableName=Variable Name
+advanced.search.files.variableName.tip=The name of the variable's column in the data frame.
+advanced.search.files.variableLabel=Variable Label
+advanced.search.files.variableLabel.tip=A short description of the variable.
+advanced.search.datasets.persistentId.tip=The persistent identifier for the dataset.
+advanced.search.datasets.persistentId=Dataset Persistent ID
+advanced.search.datasets.persistentId.tip=The unique persistent identifier for a dataset, which can be a Handle or DOI in Dataverse.
+advanced.search.files.fileTags=File Tags
+advanced.search.files.fileTags.tip=Terms such as "Documentation", "Data", or "Code" that have been applied to files.
+
+# search
+search.datasets.literalquestion=Text of the actual, literal question asked.
+search.datasets.interviewinstructions=Specific instructions to the individual conducting an interview.
+search.datasets.postquestion=Text describing what occurs after the literal question has been asked.
+search.datasets.variableuniverse=The group of persons or other elements that are the object of research and to which any analytic results refer.
+search.datasets.variableNotes=For clarifying information/annotation regarding the variable.
+
+# search-include-fragment.xhtml
+dataverse.search.advancedSearch=Advanced Search
+dataverse.search.input.watermark=Search this dataverse...
+account.search.input.watermark=Search this data...
+dataverse.search.btn.find=Find
+dataverse.results.btn.addData=Add Data
+dataverse.results.btn.addData.newDataverse=New Dataverse
+dataverse.results.btn.addData.newDataset=New Dataset
+dataverse.results.dialog.addDataGuest.header=Add Data
+dataverse.results.dialog.addDataGuest.msg=Log in to create a dataverse or add a dataset.
+#dataverse.results.dialog.addDataGuest.msg.signup=Sign up or log in to create a dataverse or add a dataset.
+#UB:
+dataverse.results.dialog.addDataGuest.msg.signup=You need to log in to add a dataset.
+
+dataverse.results.dialog.addDataGuest.signup.title=Sign Up for a Dataverse Account
+dataverse.results.dialog.addDataGuest.login.title=Log into your Dataverse Account
+dataverse.results.types.dataverses=Dataverses
+dataverse.results.types.datasets=Datasets
+dataverse.results.types.files=Files
+dataverse.results.btn.filterResults=Filter Results
+# Bundle file editors, please note that "dataverse.results.empty.zero" is used in a unit test
+dataverse.results.empty.zero=There are no dataverses, datasets, or files that match your search. Please try a new search by using other or broader terms. You can also check out the search guide for tips.
+# Bundle file editors, please note that "dataverse.results.empty.hidden" is used in a unit test
+dataverse.results.empty.hidden=There are no search results based on how you have narrowed your search. You can check out the search guide for tips.
+dataverse.results.empty.browse.guest.zero=This dataverse currently has no dataverses, datasets, or files. Please log in to see if you are able to add to it.
+dataverse.results.empty.browse.guest.hidden=There are no dataverses within this dataverse. Please log in to see if you are able to add to it.
+dataverse.results.empty.browse.loggedin.noperms.zero=This dataverse currently has no dataverses, datasets, or files. You can use the Email Dataverse Contact button above to ask about this dataverse or request access for this dataverse.
+dataverse.results.empty.browse.loggedin.noperms.hidden=There are no dataverses within this dataverse.
+dataverse.results.empty.browse.loggedin.perms.zero=This dataverse currently has no dataverses, datasets, or files. You can add to it by using the Add Data button on this page.
+account.results.empty.browse.loggedin.perms.zero=You have no dataverses, datasets, or files associated with your account. You can add a dataverse or dataset by clicking the Add Data button above. Read more about adding data in the User Guide.
+dataverse.results.empty.browse.loggedin.perms.hidden=There are no dataverses within this dataverse. You can add to it by using the Add Data button on this page.
+dataverse.results.empty.link.technicalDetails=More technical details
+dataverse.search.facet.error=There was an error with your search parameters. Please clear your search and try again.
+dataverse.results.count.toofresults={0} to {1} of {2} {2, choice, 0#Results|1#Result|2#Results}
+dataverse.results.paginator.current=(Current)
+dataverse.results.btn.sort=Sort
+dataverse.results.btn.sort.option.nameAZ=Name (A-Z)
+dataverse.results.btn.sort.option.nameZA=Name (Z-A)
+dataverse.results.btn.sort.option.newest=Newest
+dataverse.results.btn.sort.option.oldest=Oldest
+dataverse.results.btn.sort.option.relevance=Relevance
+dataverse.results.cards.foundInMetadata=Found in Metadata Fields:
+dataverse.results.cards.files.tabularData=Tabular Data
+dataverse.results.solrIsDown=Please note: Due to an internal error, browsing and searching is not available.
+dataverse.theme.title=Theme
+dataverse.theme.inheritCustomization.title=For this dataverse, use the same theme as the parent dataverse.
+dataverse.theme.inheritCustomization.label=Inherit Theme
+dataverse.theme.inheritCustomization.checkbox=Inherit theme from {0}
+dataverse.theme.logo=Logo
+dataverse.theme.logo.tip=Supported image types are JPG, TIF, or PNG and should be no larger than 500 KB. The maximum display size for an image file in a dataverse's theme is 940 pixels wide by 120 pixels high.
+dataverse.theme.logo.format=Logo Format
+dataverse.theme.logo.format.selectTab.square=Square
+dataverse.theme.logo.format.selectTab.rectangle=Rectangle
+dataverse.theme.logo.alignment=Logo Alignment
+dataverse.theme.logo.alignment.selectTab.left=Left
+dataverse.theme.logo.alignment.selectTab.center=Center
+dataverse.theme.logo.alignment.selectTab.right=Right
+dataverse.theme.logo.backColor=Logo Background Color
+dataverse.theme.logo.image.upload=Upload Image
+dataverse.theme.tagline=Tagline
+dataverse.theme.website=Website
+dataverse.theme.linkColor=Link Color
+dataverse.theme.txtColor=Text Color
+dataverse.theme.backColor=Background Color
+dataverse.theme.success=You have successfully updated the theme for this dataverse!
+dataverse.theme.failure=The dataverse theme has not been updated.
+dataverse.theme.logo.image=Logo Image
+dataverse.theme.logo.imageFooter=Footer Image
+dataverse.theme.logo.image.title=The logo or image file you wish to display in the header of this dataverse.
+dataverse.theme.logo.image.footer=The logo or image file you wish to display in the footer of this dataverse.
+dataverse.theme.logo.image.uploadNewFile=Upload New File
+dataverse.theme.logo.image.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file.
+dataverse.theme.logo.image.uploadImgFile=Upload Image File
+dataverse.theme.logo.format.title=The shape for the logo or image file you upload for this dataverse.
+dataverse.theme.logo.alignment.title=Where the logo or image should display in the header or footer.
+dataverse.theme.logo.backColor.title=Select a color to display in the header or footer of this dataverse.
+dataverse.theme.headerColor=Header Colors
+dataverse.theme.headerColor.tip=Colors you select to style the header of this dataverse.
+dataverse.theme.backColor.title=Color for the header area that contains the image, tagline, URL, and text.
+dataverse.theme.linkColor.title=Color for the link to display as.
+dataverse.theme.txtColor.title=Color for the tagline text and the name of this dataverse.
+dataverse.theme.tagline.title=A phrase or sentence that describes this dataverse.
+dataverse.theme.tagline.tip=Provide a tagline that is 140 characters or less.
+dataverse.theme.website.title=URL for your personal website, institution, or any website that relates to this dataverse.
+dataverse.theme.website.tip=The website will be linked behind the tagline. To have a website listed, you must also provide a tagline.
+dataverse.theme.website.watermark=Your personal site, http://...
+dataverse.theme.website.invalidMsg=Invalid URL.
+dataverse.theme.disabled=The theme for the root dataverse has been administratively disabled with the :DisableRootDataverseTheme database setting.
+dataverse.widgets.title=Widgets
+dataverse.widgets.notPublished.why.header=Why Use Widgets?
+dataverse.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website.
+dataverse.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website.
+dataverse.widgets.notPublished.how.header=How To Use Widgets
+dataverse.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published.
+dataverse.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website.
+dataverse.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here.
+dataverse.widgets.notPublished.getStarted=To get started, publish your dataverse. To learn more about Widgets, visit the Theme + Widgets section of the User Guide.
+dataverse.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Theme + Widgets section of the User Guide.
+dataverse.widgets.searchBox.txt=Dataverse Search Box
+dataverse.widgets.searchBox.tip=Add a way for visitors on your website to search Dataverse.
+dataverse.widgets.dataverseListing.txt=Dataverse Listing
+dataverse.widgets.dataverseListing.tip=Add a way for visitors on your website to view, sort, and browse your dataverses and datasets.
+dataverse.widgets.advanced.popup.header=Widget Advanced Options
+dataverse.widgets.advanced.prompt=Forward dataset citation persistent URLs to your personal website. The page you submit as your Personal Website URL must contain the code snippet for the Dataverse Listing widget.
+dataverse.widgets.advanced.url.label=Personal Website URL
+dataverse.widgets.advanced.url.watermark=http://www.example.com/page-name
+dataverse.widgets.advanced.invalid.message=Please enter a valid URL
+dataverse.widgets.advanced.success.message=Successfully updated your Personal Website URL
+dataverse.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated.
+
+# permissions-manage.xhtml
+dataverse.permissions.title=Permissions
+dataverse.permissions.dataset.title=Dataset Permissions
+dataverse.permissions.access.accessBtn=Edit Access
+dataverse.permissions.usersOrGroups=Users/Groups
+dataverse.permissions.requests=Requests
+dataverse.permissions.usersOrGroups.assignBtn=Assign Roles to Users/Groups
+dataverse.permissions.usersOrGroups.createGroupBtn=Create Group
+dataverse.permissions.usersOrGroups.description=All the users and groups that have access to your dataverse.
+dataverse.permissions.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation)
+dataverse.permissions.usersOrGroups.tabHeader.id=ID
+dataverse.permissions.usersOrGroups.tabHeader.role=Role
+dataverse.permissions.usersOrGroups.tabHeader.action=Action
+dataverse.permissions.usersOrGroups.assignedAt=Role assigned at {0}
+dataverse.permissions.usersOrGroups.removeBtn=Remove Assigned Role
+dataverse.permissions.usersOrGroups.removeBtn.confirmation=Are you sure you want to remove this role assignment?
+dataverse.permissions.roles=Roles
+dataverse.permissions.roles.add=Add New Role
+dataverse.permissions.roles.description=All the roles set up in your dataverse, that you can assign to users and groups.
+dataverse.permissions.roles.edit=Edit Role
+dataverse.permissions.roles.copy=Copy Role
+dataverse.permissions.roles.alias.required=Please enter a unique identifier for this role.
+dataverse.permissions.roles.name.required=Please enter a name for this role.
+
+# permissions-manage-files.xhtml
+dataverse.permissionsFiles.title=Restricted File Permissions
+dataverse.permissionsFiles.usersOrGroups=Users/Groups
+dataverse.permissionsFiles.usersOrGroups.assignBtn=Grant Access to Users/Groups
+dataverse.permissionsFiles.usersOrGroups.description=All the users and groups that have access to restricted files in this dataset.
+dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation)
+dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID
+dataverse.permissionsFiles.usersOrGroups.tabHeader.email=Email
+dataverse.permissionsFiles.usersOrGroups.tabHeader.authentication=Authentication
+dataverse.permissionsFiles.usersOrGroups.tabHeader.files=Files
+dataverse.permissionsFiles.usersOrGroups.tabHeader.access=Access
+dataverse.permissionsFiles.usersOrGroups.file=File
+dataverse.permissionsFiles.usersOrGroups.files=Files
+dataverse.permissionsFiles.usersOrGroups.invalidMsg=There are no users or groups with access to the restricted files in this dataset.
+dataverse.permissionsFiles.files=Restricted Files
+dataverse.permissionsFiles.files.label={0, choice, 0#Restricted Files|1#Restricted File|2#Restricted Files}
+dataverse.permissionsFiles.files.description=All the restricted files in this dataset.
+dataverse.permissionsFiles.files.tabHeader.fileName=File Name
+dataverse.permissionsFiles.files.tabHeader.roleAssignees=Users/Groups
+dataverse.permissionsFiles.files.tabHeader.access=Access
+dataverse.permissionsFiles.files.tabHeader.publishedRestrictedState=Published
+dataverse.permissionsFiles.files.tabHeader.draftRestrictedState=Draft
+dataverse.permissionsFiles.files.deleted=Deleted
+dataverse.permissionsFiles.files.public=Public
+dataverse.permissionsFiles.files.restricted=Restricted
+dataverse.permissionsFiles.files.roleAssignee=User/Group
+dataverse.permissionsFiles.files.roleAssignees=Users/Groups
+dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Users/Groups|1#User/Group|2#Users/Groups}
+dataverse.permissionsFiles.files.assignBtn=Assign Access
+dataverse.permissionsFiles.files.invalidMsg=There are no restricted files in this dataset.
+dataverse.permissionsFiles.files.requested=Requested Files
+dataverse.permissionsFiles.files.selected=Selecting {0} of {1} {2}
+dataverse.permissionsFiles.files.includeDeleted=Include Deleted Files
+dataverse.permissionsFiles.viewRemoveDialog.header=File Access
+dataverse.permissionsFiles.viewRemoveDialog.removeBtn=Remove Access
+dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=Are you sure you want to remove access to this file? Once access has been removed, the user or group will no longer be able to download this file.
+dataverse.permissionsFiles.assignDialog.header=Grant File Access
+dataverse.permissionsFiles.assignDialog.description=Grant file access to users and groups.
+dataverse.permissionsFiles.assignDialog.userOrGroup=Users/Groups
+dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=Enter User/Group Name
+dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=No matches found.
+dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group.
+dataverse.permissionsFiles.assignDialog.fileName=File Name
+dataverse.permissionsFiles.assignDialog.grantBtn=Grant
+dataverse.permissionsFiles.assignDialog.rejectBtn=Reject
+
+# permissions-configure.xhtml
+dataverse.permissions.accessDialog.header=Edit Access
+dataverse.permissions.description=Current access configuration to your dataverse.
+dataverse.permissions.tip=Select whether all users or only certain users are able to add to this dataverse by clicking the Edit Access button.
+dataverse.permissions.Q1=Who can add to this dataverse?
+dataverse.permissions.Q1.answer1=Anyone adding to this dataverse needs to be given access
+dataverse.permissions.Q1.answer2=Anyone with a Dataverse account can add sub dataverses
+dataverse.permissions.Q1.answer3=Anyone with a Dataverse account can add datasets
+dataverse.permissions.Q1.answer4=Anyone with a Dataverse account can add sub dataverses and datasets
+dataverse.permissions.Q2=When a user adds a new dataset to this dataverse, which role should be automatically assigned to them on that dataset?
+dataverse.permissions.Q2.answer.editor.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, Submit datasets for review
+dataverse.permissions.Q2.answer.manager.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, File Restrictions (Files Access + Use)
+dataverse.permissions.Q2.answer.curator.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, File Restrictions (Files Access + Use), Edit Permissions/Assign Roles + Publish
+permission.anyoneWithAccount=Anyone with a Dataverse account
+
+# roles-assign.xhtml
+dataverse.permissions.usersOrGroups.assignDialog.header=Assign Role
+dataverse.permissions.usersOrGroups.assignDialog.description=Grant permissions to users and groups by assigning them a role.
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Users/Groups
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Enter User/Group Name
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=No matches found.
+dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group.
+dataverse.permissions.usersOrGroups.assignDialog.role.description=These are the permissions associated with the selected role.
+dataverse.permissions.usersOrGroups.assignDialog.role.warning=Assigning the {0} role means the user(s) will also have the {0} role applied to all {1} within this {2}.
+dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Please select a role to assign.
+
+# roles-edit.xhtml
+dataverse.permissions.roles.header=Edit Role
+dataverse.permissions.roles.name=Role Name
+dataverse.permissions.roles.name.title=Enter a name for the role.
+dataverse.permissions.roles.id=Identifier
+dataverse.permissions.roles.id.title=Enter a name for the alias.
+dataverse.permissions.roles.description.title=Describe the role (1000 characters max).
+dataverse.permissions.roles.description.counter={0} characters remaining
+dataverse.permissions.roles.roleList.header=Role Permissions
+dataverse.permissions.roles.roleList.authorizedUserOnly=Permissions with an asterisk icon indicate actions that can be performed by users not logged into Dataverse.
+
+# explicitGroup-new-dialog.xhtml
+dataverse.permissions.explicitGroupEditDialog.title.new=Create Group
+dataverse.permissions.explicitGroupEditDialog.title.edit=Edit Group {0}
+dataverse.permissions.explicitGroupEditDialog.help=Add users or other groups to this group.
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier=Group Identifier
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip=Short name used for the ID of this group.
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=Group identifier cannot be empty
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=Group identifier can contain only letters, digits, underscores (_) and dashes (-)
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=Consists of letters, digits, underscores (_) and dashes (-)
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.taken=Group identifier already used in this dataverse
+dataverse.permissions.explicitGroupEditDialog.groupName=Group Name
+dataverse.permissions.explicitGroupEditDialog.groupName.required=Group name cannot be empty
+dataverse.permissions.explicitGroupEditDialog.groupDescription=Description
+dataverse.permissions.explicitGroupEditDialog.roleAssigneeName=User/Group
+dataverse.permissions.explicitGroupEditDialog.roleAssigneeNames=Users/Groups
+dataverse.permissions.explicitGroupEditDialog.createGroup=Create Group
+
+# manage-templates.xhtml
+dataset.manageTemplates.pageTitle=Manage Dataset Templates
+dataset.manageTemplates.select.txt=Include Templates from {0}
+dataset.manageTemplates.createBtn=Create Dataset Template
+dataset.manageTemplates.saveNewTerms=Save Dataset Template
+dataset.manageTemplates.noTemplates.why.header=Why Use Templates?
+dataset.manageTemplates.noTemplates.why.reason1=Templates are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in.
+dataset.manageTemplates.noTemplates.why.reason2=Templates can be used to input instructions for those uploading datasets into your dataverse if you have a specific way you want a metadata field to be filled out.
+dataset.manageTemplates.noTemplates.how.header=How To Use Templates
+dataset.manageTemplates.noTemplates.how.tip1=Templates are created at the dataverse level, can be deleted (so they do not show for future datasets), can be set as the default (not required), and can be copied so you do not have to start over when creating a new template with metadata similar to another template. When a template is deleted, it does not impact the datasets that have already used it.
+dataset.manageTemplates.noTemplates.how.tip2=Please note that the ability to choose which metadata fields are hidden, required, or optional is done on the General Information page for this dataverse.
+dataset.manageTemplates.noTemplates.getStarted=To get started, click on the Create Dataset Template button above. To learn more about templates, visit the Dataset Templates section of the User Guide.
+dataset.manageTemplates.tab.header.templte=Template Name
+dataset.manageTemplates.tab.header.date=Date Created
+dataset.manageTemplates.tab.header.usage=Usage
+dataset.manageTemplates.tab.header.action=Action
+dataset.manageTemplates.tab.action.btn.makeDefault=Make Default
+dataset.manageTemplates.tab.action.btn.default=Default
+dataset.manageTemplates.tab.action.btn.view=View
+dataset.manageTemplates.tab.action.btn.copy=Copy
+dataset.manageTemplates.tab.action.btn.edit=Edit
+dataset.manageTemplates.tab.action.btn.edit.metadata=Metadata
+dataset.manageTemplates.tab.action.btn.edit.terms=Terms
+dataset.manageTemplates.tab.action.btn.delete=Delete
+dataset.manageTemplates.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this template? A new dataset will not be able to use this template.
+dataset.manageTemplates.tab.action.btn.delete.dialog.header=Delete Template
+dataset.manageTemplates.tab.action.btn.view.dialog.header=Dataset Template Preview
+dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate=Dataset Template
+dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate.title=The dataset template which prepopulates info into the form automatically.
+dataset.manageTemplates.tab.action.noedit.createdin=Template created at {0}
+dataset.manageTemplates.delete.usedAsDefault=This template is the default template for the following dataverse(s). It will be removed as default as well.
+dataset.message.manageTemplates.label=Manage Dataset Templates
+dataset.message.manageTemplates.message=Create a template prefilled with standard values for metadata fields, such as Author Affiliation, or add instructions in the metadata fields to give depositors more information about what metadata is expected.
+
+# metadataFragment.xhtml
+dataset.anonymized.withheld=withheld
+
+# template.xhtml
+dataset.template.name.tip=The name of the dataset template.
+dataset.template.returnBtn=Return to Manage Templates
+dataset.template.name.title=Enter a unique name for the template.
+template.asterisk.tip=Asterisks indicate metadata fields that users will be required to fill out while adding a dataset to this dataverse.
+dataset.template.popup.create.title=Create Template
+dataset.template.popup.create.text=Do you want to add default Terms of Use and/or Access?
+dataset.create.add.terms=Save + Add Terms
+
+# manage-groups.xhtml
+dataverse.manageGroups.pageTitle=Manage Dataverse Groups
+dataverse.manageGroups.createBtn=Create Group
+dataverse.manageGroups.noGroups.why.header=Why Use Groups?
+dataverse.manageGroups.noGroups.why.reason1=Groups allow you to assign roles and permissions for many users at once.
+dataverse.manageGroups.noGroups.why.reason2=You can use groups to manage multiple different kinds of users (students, collaborators, etc.)
+dataverse.manageGroups.noGroups.how.header=How To Use Groups
+dataverse.manageGroups.noGroups.how.tip1=A group can contain both users and other groups.
+dataverse.manageGroups.noGroups.how.tip2=You can assign permissions to a group in the "Permissions" view.
+dataverse.manageGroups.noGroups.getStarted=To get started, click on the Create Group button above.
+dataverse.manageGroups.tab.header.name=Group Name
+dataverse.manageGroups.tab.header.id=Group ID
+dataverse.manageGroups.tab.header.membership=Membership
+dataverse.manageGroups.tab.header.action=Action
+dataverse.manageGroups.tab.action.btn.view=View
+dataverse.manageGroups.tab.action.btn.copy=Copy
+dataverse.manageGroups.tab.action.btn.enable=Enable
+dataverse.manageGroups.tab.action.btn.disable=Disable
+dataverse.manageGroups.tab.action.btn.edit=Edit
+dataverse.manageGroups.tab.action.btn.viewCollectedData=View Collected Data
+dataverse.manageGroups.tab.action.btn.delete=Delete
+dataverse.manageGroups.tab.action.btn.delete.dialog.header=Delete Group
+dataverse.manageGroups.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this group? You cannot undelete a group.
+dataverse.manageGroups.tab.action.btn.view.dialog.header=Dataverse Group
+dataverse.manageGroups.tab.action.btn.view.dialog.group=Group Name
+dataverse.manageGroups.tab.action.btn.view.dialog.groupView.name=Member Name
+dataverse.manageGroups.tab.action.btn.view.dialog.groupView.type=Member Type
+dataverse.manageGroups.tab.action.btn.view.dialog.groupView.action=Action
+dataverse.manageGroups.tab.action.btn.view.dialog.groupView.delete=Delete
+dataverse.manageGroups.tab.action.btn.view.dialog.groupMembers=Group Members
+dataverse.manageGroups.tab.action.btn.view.dialog.enterName=Enter User/Group Name
+dataverse.manageGroups.tab.action.btn.view.dialog.invalidMsg=No matches found.
+
+# manage-guestbooks.xhtml
+dataset.manageGuestbooks.pageTitle=Manage Dataset Guestbooks
+dataset.manageGuestbooks.include=Include Guestbooks from {0}
+dataset.manageGuestbooks.createBtn=Create Dataset Guestbook
+dataset.manageGuestbooks.download.all.responses=Download All Responses
+dataset.manageGuestbooks.download.responses=Download Responses
+dataset.manageGuestbooks.noGuestbooks.why.header=Why Use Guestbooks?
+dataset.manageGuestbooks.noGuestbooks.why.reason1=Guestbooks allow you to collect data about who is downloading the files from your datasets. You can decide to collect account information (username, given name & last name, affiliation, etc.) as well as create custom questions (e.g., What do you plan to use this data for?).
+dataset.manageGuestbooks.noGuestbooks.why.reason2=You can download the data collected from the enabled guestbooks so that you can store it outside of Dataverse.
+dataset.manageGuestbooks.noGuestbooks.how.header=How To Use Guestbooks
+dataset.manageGuestbooks.noGuestbooks.how.tip1=A guestbook can be used for multiple datasets but only one guestbook can be used for a dataset.
+dataset.manageGuestbooks.noGuestbooks.how.tip2=Custom questions can have free form text answers or have a user select an answer from several options.
+dataset.manageGuestbooks.noGuestbooks.getStarted=To get started, click on the Create Dataset Guestbook button above. To learn more about Guestbooks, visit the Dataset Guestbook section of the User Guide.
+dataset.manageGuestbooks.tab.header.name=Guestbook Name
+dataset.manageGuestbooks.tab.header.date=Date Created
+dataset.manageGuestbooks.tab.header.usage=Usage
+dataset.manageGuestbooks.tab.header.responses=Responses
+dataset.manageGuestbooks.tab.header.action=Action
+dataset.manageGuestbooks.tab.action.btn.view=Preview
+dataset.manageGuestbooks.tab.action.btn.copy=Copy
+dataset.manageGuestbooks.tab.action.btn.enable=Enable
+dataset.manageGuestbooks.tab.action.btn.disable=Disable
+dataset.manageGuestbooks.tab.action.btn.edit=Edit
+dataset.manageGuestbooks.tab.action.btn.preview=Preview
+dataset.manageGuestbooks.tab.action.btn.viewCollectedData=View Responses
+dataset.manageGuestbooks.tab.action.btn.delete=Delete
+dataset.manageGuestbooks.tab.action.btn.delete.dialog.header=Delete Guestbook
+dataset.manageGuestbooks.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this guestbook? You cannot undelete a guestbook.
+dataset.manageGuestbooks.tab.action.btn.view.dialog.header=Preview Guestbook
+dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook.title=Upon downloading files, the guestbook asks for the following information.
+dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook=Guestbook Name
+dataset.manageGuestbooks.tab.action.btn.viewCollectedData.dialog.header=Dataset Guestbook Collected Data
+dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData.title=User data collected by the guestbook.
+dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData=Collected Data
+dataset.manageGuestbooks.tab.action.noedit.createdin=Guestbook created at {0}
+dataset.manageGuestbooks.message.deleteSuccess=The guestbook has been deleted.
+dataset.manageGuestbooks.message.deleteFailure=The guestbook cannot be deleted.
+dataset.manageGuestbooks.message.editSuccess=The guestbook has been updated.
+dataset.manageGuestbooks.message.editFailure=The guestbook could not be updated.
+dataset.manageGuestbooks.message.enableSuccess=The guestbook has been enabled.
+dataset.manageGuestbooks.message.enableFailure=The guestbook could not be enabled.
+dataset.manageGuestbooks.message.disableSuccess=The guestbook has been disabled.
+dataset.manageGuestbooks.message.disableFailure=The guestbook could not be disabled.
+dataset.manageGuestbooks.tip.title=Manage Dataset Guestbooks
+dataset.manageGuestbooks.tip.downloadascsv=Click \"Download All Responses\" to download all collected guestbook responses for this dataverse, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software.
+dataset.guestbooksResponses.dataset=Dataset
+dataset.guestbooksResponses.date=Date
+dataset.guestbooksResponses.type=Type
+dataset.guestbooksResponses.file=File
+dataset.guestbooksResponses.customQuestions=Custom Questions
+dataset.guestbooksResponses.user=User
+dataset.guestbooksResponses.tip.title=Guestbook Responses
+dataset.guestbooksResponses.count.responses={0} {0, choice, 0#Responses|1#Response|2#Responses}
+dataset.guestbooksResponses.count.toofresults={0} to {1} of {2} {2, choice, 0#Responses|1#Response|2#Responses}
+dataset.guestbooksResponses.tip.downloadascsv=Click \"Download Responses\" to download all collected responses for this guestbook, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software.
+dataset.guestbooksResponses.tooManyResponses.message=Note: this guestbook has too many responses to display on this page. Only the most recent {0} responses are shown below. Click \"Download Responses\" to download all collected responses ({1} total) as a CSV file.
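+# Note: values containing {N, choice, ...} (as in the response counts above) use
+# java.text.MessageFormat choice syntax to select singular/plural wording by count.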
+
+# guestbook-responses.xhtml
+dataset.guestbookResponses.pageTitle=Guestbook Responses
+
+# guestbook.xhtml
+dataset.manageGuestbooks.guestbook.name=Guestbook Name
+dataset.manageGuestbooks.guestbook.name.tip=Enter a unique name for this Guestbook.
+dataset.manageGuestbooks.guestbook.dataCollected=Data Collected
+dataset.manageGuestbooks.guestbook.dataCollected.description=Dataverse account information that will be collected when a user downloads a file. Check the ones that will be required.
+dataset.manageGuestbooks.guestbook.customQuestions=Custom Questions
+dataset.manageGuestbooks.guestbook.accountInformation=Account Information
+dataset.manageGuestbooks.guestbook.required=(Required)
+dataset.manageGuestbooks.guestbook.optional=(Optional)
+dataset.manageGuestbooks.guestbook.customQuestions.description=Create your own questions to have users provide more than their account information when they download a file. Questions can be required or optional and answers can be text or multiple choice.
+dataset.manageGuestbooks.guestbook.customQuestions.questionType=Question Type
+dataset.manageGuestbooks.guestbook.customQuestions.questionText=Question Text
+dataset.manageGuestbooks.guestbook.customQuestions.responseOptions=Response Options
+dataset.manageGuestbooks.guestbook.customQuestions.questionType.text=Text
+dataset.manageGuestbooks.guestbook.customQuestions.questionType.multiple=Multiple Choice
+
+# guestbookResponseFragment.xhtml
+dataset.guestbookResponse.guestbook.additionalQuestions=Additional Questions
+dataset.guestbookResponse.showPreview.errorMessage=Can't show preview.
+dataset.guestbookResponse.showPreview.errorDetail=Couldn't write guestbook response.
+
+# dataset.xhtml
+dataset.configureBtn=Configure
+dataset.pageTitle=Add New Dataset
+
+dataset.accessBtn=Access Dataset
+dataset.accessBtn.header.download=Download Options
+dataset.accessBtn.header.explore=Explore Options
+dataset.accessBtn.header.compute=Compute Options
+dataset.accessBtn.download.size=ZIP ({0})
+dataset.accessBtn.too.big=The dataset is too large to download. Please select the files you need from the files table.
+dataset.accessBtn.original.too.big=The dataset is too large to download in the original format. Please select the files you need from the files table.
+dataset.accessBtn.archival.too.big=The dataset is too large to download in the archival format. Please select the files you need from the files table.
+dataset.linkBtn=Link Dataset
+dataset.contactBtn=Contact Owner
+dataset.shareBtn=Share
+
+dataset.publishBtn=Publish Dataset
+dataset.editBtn=Edit Dataset
+
+dataset.editBtn.itemLabel.upload=Files (Upload)
+dataset.editBtn.itemLabel.metadata=Metadata
+dataset.editBtn.itemLabel.terms=Terms
+dataset.editBtn.itemLabel.permissions=Permissions
+dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets
+dataset.editBtn.itemLabel.privateUrl=Private URL
+dataset.editBtn.itemLabel.permissionsDataset=Dataset
+dataset.editBtn.itemLabel.permissionsFile=Restricted Files
+dataset.editBtn.itemLabel.deleteDataset=Delete Dataset
+dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version
+dataset.editBtn.itemLabel.deaccession=Deaccession Dataset
+dataset.exportBtn=Export Metadata
+dataset.exportBtn.itemLabel.ddi=DDI
+dataset.exportBtn.itemLabel.dublinCore=Dublin Core
+dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD
+dataset.exportBtn.itemLabel.datacite=DataCite
+dataset.exportBtn.itemLabel.json=JSON
+dataset.exportBtn.itemLabel.oai_ore=OAI_ORE
+dataset.exportBtn.itemLabel.dataciteOpenAIRE=OpenAIRE
+dataset.exportBtn.itemLabel.html=DDI HTML Codebook
+metrics.title=Metrics
+metrics.title.tip=View more metrics information
+metrics.dataset.title=Dataset Metrics
+metrics.dataset.tip.default=Aggregated metrics for this dataset.
+metrics.dataset.tip.makedatacount=Metrics collected using Make Data Count standards.
+metrics.dataset.views.tip=Dataset views are combined with both aggregated file views and file downloads.
+metrics.dataset.downloads.default.tip=Total aggregated downloads of files in this dataset.
+metrics.dataset.downloads.makedatacount.tip=Each file downloaded is counted as 1, and added to the total download count.
+metrics.dataset.citations.tip=Click for a list of citation URLs.
+metrics.file.title=File Metrics
+metrics.file.tip.default=Metrics for this individual file.
+metrics.file.tip.makedatacount=Individual file downloads are tracked in Dataverse but are not reported as part of the Make Data Count standard.
+metrics.file.downloads.tip=Total downloads of this file.
+metrics.views={0, choice, 0#Views|1#View|2#Views}
+metrics.downloads={0, choice, 0#Downloads|1#Download|2#Downloads}
+metrics.citations={0, choice, 0#Citations|1#Citation|2#Citations}
+metrics.citations.dialog.header=Dataset Citations
+metrics.citations.dialog.help=Citations for this dataset are retrieved from Crossref via DataCite using Make Data Count standards. For more information about dataset metrics, please refer to the User Guide.
+metrics.citations.dialog.empty=Sorry, no citations were found.
+dataset.publish.btn=Publish
+dataset.publish.header=Publish Dataset
+dataset.rejectBtn=Return to Author
+dataset.submitBtn=Submit for Review
+dataset.disabledSubmittedBtn=Submitted for Review
+dataset.submitMessage=You will not be able to make changes to this dataset while it is in review.
+dataset.submit.success=Your dataset has been submitted for review.
+dataset.inreview.infoMessage=The draft version of this dataset is currently under review prior to publication.
+dataset.submit.failure=Dataset Submission Failed - {0}
+dataset.submit.failure.null=Can't submit for review. Dataset is null.
+dataset.submit.failure.isReleased=Latest version of dataset is already released. Only draft versions can be submitted for review.
+dataset.submit.failure.inReview=You cannot submit this dataset for review because it is already in review.
+dataset.rejectMessage=Return this dataset to contributor for modification.
+dataset.rejectMessage.label=Return to Author Reason
+dataset.rejectWatermark=Please enter a reason for returning this dataset to its author(s).
+dataset.reject.enterReason.error=Reason for return to author is required.
+dataset.reject.success=This dataset has been sent back to the contributor.
+dataset.reject.failure=Dataset Submission Return Failed - {0}
+dataset.reject.datasetNull=Cannot return the dataset to the author(s) because it is null.
+dataset.reject.datasetNotInReview=This dataset cannot be returned to the author(s) because the latest version is not In Review. The author(s) need to click Submit for Review first.
+dataset.publish.tip=Are you sure you want to publish this dataset? Once you do so it must remain published.
+dataset.publishBoth.tip=Once you publish this dataset it must remain published.
+dataset.unregistered.tip= This dataset is unregistered. We will attempt to register it before publishing.
+dataset.republish.tip=Are you sure you want to republish this dataset?
+dataset.selectVersionNumber=Select if this is a minor or major version update.
+dataset.updateRelease=Update Current Version (will permanently overwrite the latest published version)
+dataset.majorRelease=Major Release
+dataset.minorRelease=Minor Release
+dataset.majorRelease.tip=Due to the nature of changes to the current draft, this will be a major release ({0})
+dataset.mayNotBePublished=Cannot publish dataset.
+dataset.mayNotPublish.administrator= This dataset cannot be published until {0} is published by its administrator.
+dataset.mayNotPublish.both= This dataset cannot be published until {0} is published. Would you like to publish both right now?
+dataset.mayNotPublish.twoGenerations= This dataset cannot be published until {0} and {1} are published.
+dataset.mayNotBePublished.both.button=Yes, Publish Both
+dataset.viewVersion.unpublished=View Unpublished Version
+dataset.viewVersion.published=View Published Version
+dataset.link.title=Link Dataset
+dataset.link.save=Save Linked Dataset
+dataset.link.not.to.owner=Can't link a dataset to its dataverse
+dataset.link.not.to.parent.dataverse=Can't link a dataset to its parent dataverses
+dataset.link.not.published=Can't link a dataset that has not been published
+dataset.link.not.available=Can't link a dataset that has not been published or is not harvested
+dataset.link.not.already.linked=Can't link a dataset that has already been linked to this dataverse
+dataset.email.datasetContactTitle=Contact Dataset Owner
+dataset.email.hiddenMessage=
+dataset.email.messageSubject=Test Message Subject
+dataset.email.datasetLinkBtn.tip=Link Dataset to Your Dataverse
+dataset.share.datasetShare=Share Dataset
+dataset.share.datasetShare.tip=Share this dataset on your favorite social media networks.
+dataset.share.datasetShare.shareText=View this dataset.
+dataset.locked.message=Dataset Locked
+dataset.locked.message.details=This dataset is locked until publication.
+dataset.locked.inReview.message=Submitted for Review
+dataset.locked.ingest.message=The tabular data files uploaded are being processed and converted into the archival format
+dataset.unlocked.ingest.message=The tabular files have been ingested.
+dataset.locked.editInProgress.message=Edit In Progress
+dataset.locked.editInProgress.message.details=Additional edits cannot be made at this time. Contact {0} if this status persists.
+dataset.locked.pidNotReserved.message=Dataset DOI Not Reserved
+dataset.locked.pidNotReserved.message.details=The DOI displayed in the citation for this dataset has not yet been reserved with DataCite. Please do not share this DOI until it has been reserved.
+dataset.publish.error=This dataset may not be published due to an error when contacting the {0} Service. Please try again.
+dataset.publish.error.doi=This dataset may not be published because the DOI update failed.
+dataset.publish.file.validation.error.message=Failed to Publish Dataset
+dataset.publish.file.validation.error.details=The dataset could not be published because one or more of the datafiles in the dataset could not be validated (physical file missing, checksum mismatch, etc.)
+dataset.publish.file.validation.error.contactSupport=The dataset could not be published because one or more of the datafiles in the dataset could not be validated (physical file missing, checksum mismatch, etc.) Please contact support for further assistance.
+dataset.publish.file.validation.error.noChecksumType=Checksum type not defined for datafile id {0}
+dataset.publish.file.validation.error.failRead=Failed to open datafile id {0} for reading
+dataset.publish.file.validation.error.failCalculateChecksum=Failed to calculate checksum for datafile id {0}
+dataset.publish.file.validation.error.wrongChecksumValue=Checksum mismatch for datafile id {0}
+dataset.compute.computeBatchSingle=Compute Dataset
+dataset.compute.computeBatchList=List Batch
+dataset.compute.computeBatchAdd=Add to Batch
+dataset.compute.computeBatchClear=Clear Batch
+dataset.compute.computeBatchRemove=Remove from Batch
+dataset.compute.computeBatchCompute=Compute Batch
+dataset.compute.computeBatch.success=The list of datasets in your compute batch has been updated.
+dataset.compute.computeBatch.failure=The list of datasets in your compute batch failed to be updated. Please try again.
+dataset.compute.computeBtn=Compute
+dataset.compute.computeBatchListHeader=Compute Batch
+dataset.compute.computeBatchRestricted=This dataset contains restricted files you may not compute on because you have not been granted access.
+dataset.delete.error=Could not deaccession the dataset because the {0} update failed.
+dataset.publish.workflow.message=Publish in Progress
+dataset.publish.workflow.inprogress=This dataset is locked until publication.
+dataset.pidRegister.workflow.inprogress=The dataset is locked while the persistent identifiers are being registered or updated, and/or the physical files are being validated.
+dataset.versionUI.draft=Draft
+dataset.versionUI.inReview=In Review
+dataset.versionUI.unpublished=Unpublished
+dataset.versionUI.deaccessioned=Deaccessioned
+dataset.cite.title.released=DRAFT VERSION will be replaced in the citation with V1 once the dataset has been published.
+dataset.cite.title.draft=DRAFT VERSION will be replaced in the citation with the selected version once the dataset has been published.
+dataset.cite.title.deassessioned=DEACCESSIONED VERSION has been added to the citation for this version since it is no longer available.
+dataset.cite.standards.tip=Learn about Data Citation Standards.
+dataset.cite.downloadBtn=Cite Dataset
+dataset.cite.downloadBtn.xml=EndNote XML
+dataset.cite.downloadBtn.ris=RIS
+dataset.cite.downloadBtn.bib=BibTeX
+dataset.create.authenticatedUsersOnly=Only authenticated users can create datasets.
+dataset.deaccession.reason=Deaccession Reason
+dataset.beAccessedAt=The dataset can now be accessed at:
+dataset.descriptionDisplay.title=Description
+dataset.keywordDisplay.title=Keyword
+dataset.subjectDisplay.title=Subject
+dataset.contact.tip=Use the email button above to contact the dataset owner.
+dataset.asterisk.tip=Asterisks indicate required fields
+dataset.message.uploadFiles.label=Upload Dataset Files
+dataset.message.uploadFilesSingle.message=For more information about supported file formats, please refer to the User Guide.
+dataset.message.uploadFilesMultiple.message=Multiple file upload/download methods are available for this dataset. Once you upload a file using one of these methods, your choice will be locked in for this dataset.
+dataset.message.editMetadata.label=Edit Dataset Metadata
+dataset.message.editMetadata.message=Add more metadata about this dataset to help others easily find it.
+dataset.message.editMetadata.duplicateFilenames=Duplicate filenames: {0}
+dataset.message.editTerms.label=Edit Dataset Terms
+dataset.message.editTerms.message=Add the terms of use for this dataset to explain how to access and use your data.
+dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock.
+dataset.message.locked.downloadNotAllowedInReview=Dataset file(s) may not be downloaded due to In Review dataset lock.
+dataset.message.locked.downloadNotAllowed=Dataset file(s) may not be downloaded due to dataset lock.
+dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lock.
+dataset.message.locked.publishNotAllowed=Dataset cannot be published due to dataset lock.
+dataset.message.createSuccess=This dataset has been created.
+dataset.message.createSuccess.failedToSaveFiles=Partial Success: The dataset has been created. But the file(s) could not be saved. Please try uploading the file(s) again.
+dataset.message.createSuccess.partialSuccessSavingFiles=Partial Success: The dataset has been created. But only {0} out of {1} files have been saved. Please try uploading the missing file(s) again.
+dataset.message.linkSuccess= {0} has been successfully linked to {1}.
+dataset.message.metadataSuccess=The metadata for this dataset has been updated.
+dataset.message.termsSuccess=The terms for this dataset have been updated.
+dataset.message.filesSuccess=The files for this dataset have been updated.
+dataset.message.addFiles.Failure=Failed to add files to the dataset. Please try uploading the file(s) again.
+dataset.message.addFiles.partialSuccess=Partial success: only {0} files out of {1} have been saved. Please try uploading the missing file(s) again.
+dataset.message.publish.remind.draft=If it's ready for sharing, please publish it.
+dataset.message.submit.remind.draft=If it's ready for sharing, please submit it for review.
+dataset.message.publish.remind.version=If it's ready for sharing, please publish it so that others can see these changes.
+dataset.message.submit.remind.version=If it's ready for sharing, please submit it for review so that others can see these changes.
+dataset.message.publishSuccess=This dataset has been published.
+dataset.message.only.authenticatedUsers=Only authenticated users may release Datasets.
+dataset.message.deleteSuccess=This dataset has been deleted.
+dataset.message.bulkFileUpdateSuccess=The selected files have been updated.
+dataset.message.bulkFileDeleteSuccess=The selected files have been deleted.
+datasetVersion.message.deleteSuccess=This dataset draft has been deleted.
+datasetVersion.message.deaccessionSuccess=The selected version(s) have been deaccessioned.
+dataset.message.deaccessionSuccess=This dataset has been deaccessioned.
+dataset.message.publishFailure=The dataset could not be published.
+dataset.message.metadataFailure=The metadata could not be updated.
+dataset.message.filesFailure=The files could not be updated.
+dataset.message.bulkFileDeleteFailure=The selected files could not be deleted.
+dataset.message.files.ingestFailure=The file(s) could not be ingested.
+dataset.message.deleteFailure=This dataset draft could not be deleted.
+dataset.message.deaccessionFailure=This dataset could not be deaccessioned.
+dataset.message.createFailure=The dataset could not be created.
+dataset.message.termsFailure=The dataset terms could not be updated.
+dataset.message.label.fileAccess=File Access
+dataset.message.publicInstall=Files are stored on a publicly accessible storage server.
+dataset.metadata.publicationDate=Publication Date
+dataset.metadata.publicationDate.tip=The publication date of a dataset.
+dataset.metadata.publicationYear=Publication Year
+dataset.metadata.publicationYear.tip=The publication year of a dataset.
+dataset.metadata.persistentId=Dataset Persistent ID
+dataset.metadata.persistentId.tip=The unique persistent identifier for a dataset, which can be a Handle or DOI in Dataverse.
+dataset.metadata.alternativePersistentId=Previous Dataset Persistent ID
+dataset.metadata.alternativePersistentId.tip=A previously used persistent identifier for a dataset, which can be a Handle or DOI in Dataverse.
+file.metadata.preview=Preview
+file.metadata.filetags=File Tags
+file.metadata.persistentId=File Persistent ID
+file.metadata.persistentId.tip=The unique persistent identifier for a file, which can be a Handle or DOI in Dataverse.
+dataset.versionDifferences.termsOfUseAccess=Terms of Use and Access
+dataset.versionDifferences.termsOfUseAccessChanged=Terms of Use/Access Changed
+dataset.versionDifferences.metadataBlock=Metadata Block
+dataset.versionDifferences.field=Field
+dataset.versionDifferences.changed=Changed
+dataset.versionDifferences.from=From
+dataset.versionDifferences.to=To
+file.viewDiffDialog.fileAccess=Access
+dataset.host.tip=Changing the host dataverse will clear any fields you may have entered data into.
+dataset.template.tip=Changing the template will clear any fields you may have entered data into.
+dataset.noTemplate.label=None
+dataset.noSelectedFiles.header=Select File(s)
+dataset.noSelectedFiles=Please select one or more files.
+dataset.noSelectedFilesForDownload=Please select a file or files to be downloaded.
+dataset.noSelectedFilesForRequestAccess=Please select a file or files for access request.
+dataset.inValidSelectedFilesForDownload=Restricted Files Selected
+dataset.noValidSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access.
+dataset.mixedSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access.
+dataset.downloadUnrestricted=Click Continue to download the files you have access to.
+
+dataset.requestAccessToRestrictedFiles=You may request access to the restricted file(s) by clicking the Request Access button.
+dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0}
+dataset.privateurl.infoMessageReviewer=This unpublished dataset is being privately shared. You will not be able to access it when logged into your Dataverse account.
+dataset.privateurl.header=Unpublished Dataset Private URL
+dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide.
+dataset.privateurl.absent=Private URL has not been created.
+dataset.privateurl.createPrivateUrl=Create Private URL
+dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access
+dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published
+dataset.privateurl.disablePrivateUrl=Disable Private URL
+dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL
+dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset.
+dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets.
+dataset.privateurl.roleassigeeTitle=Private URL Enabled
+dataset.privateurl.createdSuccess=Success!
+dataset.privateurl.full=This Private URL provides full read access to the dataset
+dataset.privateurl.anonymized=This Private URL provides access to the anonymized dataset
+dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset.
+dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}.
+file.display.label=Change View
+file.display.table=Table
+file.display.tree=Tree
+file.count.label=File Count
+file.count.one=1 File
+file.count={0} to {1} of {2} {2, choice, 0#Files|1#File|2#Files}
+file.count.shown={0} {0, choice, 0#Files Selected|1#File|2#Files}
+file.clearSelection=Clear selection.
+file.zip.download.exceeds.limit=The overall size of the files selected ({0}) for download exceeds the zip limit of {1}. Please unselect some files to continue.
+file.zip.download.exceeds.limit.info=The files selected are too large to download as a ZIP.
+file.zip.download.exceeds.limit.detail=You can select individual files that are below the {2} download limit from the files table, or use the Data Access API for programmatic access to the files.
+file.zip.download.exceeds.limit.header=Download Options
+file.numFilesSelected={0} {0, choice, 0#files are|1#file is|2#files are} currently selected.
+file.select.tooltip=Select Files
+file.selectAllFiles=Select all {0} files in this dataset.
+file.dynamicCounter.filesPerPage=Files Per Page
+file.selectToAddBtn=Select Files to Add
+file.selectToAdd.tipLimit=File upload limit is {0} per file.
+file.selectToAdd.tipMoreInformation=Select files or drag and drop into the upload widget.
+file.selectToAdd.dragdropMsg=Drag and drop files here.
+file.createUploadDisabled=Upload files using rsync via SSH. This method is recommended for large file transfers. The upload script will be available on the Upload Files page once you save this dataset.
+file.fromHTTP=Upload with HTTP via your browser
+file.fromDropbox=Upload from Dropbox
+file.fromDropbox.tip=Select files from Dropbox.
+file.fromRsync=Upload with rsync + SSH via Data Capture Module (DCM)
+file.api.httpDisabled=File upload via HTTP is not available for this installation of Dataverse.
+file.api.alreadyHasPackageFile=File upload via HTTP is disabled since this dataset already contains a package file.
+file.replace.original=Original File
+file.editFiles=Edit Files
+file.editFilesSelected=Edit
+file.editFile=Edit
+
+file.actionsBlock=File Actions
+file.accessBtn=Access File
+file.accessBtn.header.download=Download Options
+file.optionsBtn=File Options
+file.optionsBtn.header.edit=Edit Options
+file.optionsBtn.header.configure=Configure Options
+file.editBtn=Edit File
+file.contactBtn=Contact Owner
+file.shareBtn=Share
+file.share.title=Share File
+file.share.tip=Share this file on your favorite social media networks.
+file.share.text=View this file.
+file.bulkUpdate=Bulk Update
+file.uploadFiles=Upload Files
+file.replaceFile=Replace File
+file.notFound.tip=There are no files in this dataset.
+file.notFound.search=There are no files that match your search. Please change the search terms and try again.
+file.noSelectedFiles.tip=There are no selected files to display.
+file.noUploadedFiles.tip=Files you upload will appear here.
+file.replace=Replace
+file.alreadyDeleted.warning.header=Edit File
+file.alreadyDeleted.previous.warningMessage=This file has already been deleted (or replaced) in the current version. It may not be edited.
+file.delete=Delete
+file.delete.duplicate.multiple=Delete Duplicate Files
+file.delete.duplicate.single=Delete Duplicate File
+file.metadata=Metadata
+file.deleted.success=Files "{0}" will be permanently deleted from this version of this dataset once you click on the Save Changes button.
+file.deleted.replacement.success=The replacement file has been deleted.
+file.deleted.upload.success.single=File has been deleted and won\u2019t be included in this upload.
+file.deleted.upload.success.multiple=Files have been deleted and won\u2019t be included in this upload.
+file.editAccess=Edit Access
+file.restrict=Restrict
+file.unrestrict=Unrestrict
+file.restricted.success=Files "{0}" will be restricted once you click on the Save Changes button.
+file.download.header=Download
+file.download.subset.header=Download Data Subset
+file.preview=Preview:
+file.fileName=File Name
+file.type.tabularData=Tabular Data
+file.originalChecksumType=Original File {0}
+file.checksum.exists.tip=A file with this checksum already exists in the dataset.
+file.selectedThumbnail=Thumbnail
+file.selectedThumbnail.tip=The thumbnail for this file is used as the default thumbnail for the dataset. Click the 'Advanced Options' button of another file to select that file.
+file.cloudStorageAccess=Cloud Storage Access
+file.cloudStorageAccess.tip=The container name for this dataset needed to access files in cloud storage.
+file.cloudStorageAccess.help=To directly access this data in the {2} cloud environment, use the container name in the Cloud Storage Access box below. To learn more about the cloud environment, visit the Cloud Storage Access section of the User Guide.
+file.copy=Copy
+file.compute=Compute
+file.rsyncUpload.info=Upload files using rsync + SSH. This method is recommended for large file transfers. Follow the steps below to upload your data. (User Guide - rsync Upload).
+file.rsyncUpload.filesExist=You cannot upload additional files to this dataset. A dataset can only hold one data package. If you need to replace the data package in this dataset, please contact {0}.
+file.rsyncUpload.noScriptBroken=The Data Capture Module failed to generate the rsync script. Please contact {0}.
+file.rsyncUpload.noScriptBusy=Currently generating rsync script. If the script takes longer than ten minutes to generate, please contact {0}.
+file.rsyncUpload.step1=Make sure your data is stored under a single directory. All files within this directory and its subdirectories will be uploaded to your dataset.
+file.rsyncUpload.step2=Download this file upload script:
+file.rsyncUpload.step2.downloadScriptButton=Download DCM Script
+file.rsyncUpload.step3=Open a terminal window in the same directory you saved the script and run this command: bash ./{0}
+file.rsyncUpload.step4=Follow the instructions in the script. It will ask for a full path (beginning with "/") to the directory containing your data. Note: this script will expire after 7 days.
+file.rsyncUpload.inProgressMessage.summary=File Upload in Progress
+file.rsyncUpload.inProgressMessage.details=This dataset is locked while the data files are being transferred and verified.
+file.rsyncUpload.httpUploadDisabledDueToRsyncFileExisting=HTTP upload is disabled for this dataset because you have already uploaded files via rsync. If you would like to switch to HTTP upload, please contact {0}.
+file.rsyncUpload.httpUploadDisabledDueToRsyncFileExistingAndPublished=HTTP upload is disabled for this dataset because you have already uploaded files via rsync and published the dataset.
+file.rsyncUpload.rsyncUploadDisabledDueFileUploadedViaHttp=Upload with rsync + SSH is disabled for this dataset because you have already uploaded files via HTTP. If you would like to switch to rsync upload, then you must first remove all uploaded files from this dataset. Once this dataset is published, the chosen upload method is permanently locked in.
+file.rsyncUpload.rsyncUploadDisabledDueFileUploadedViaHttpAndPublished=Upload with rsync + SSH is disabled for this dataset because you have already uploaded files via HTTP and published the dataset.
+file.metaData.checksum.copy=Click to copy
+file.metaData.dataFile.dataTab.unf=UNF
+file.metaData.dataFile.dataTab.variables=Variables
+file.metaData.dataFile.dataTab.observations=Observations
+file.metaData.fileAccess=File Access:
+file.addDescription=Add file description...
+file.tags=Tags
+file.editTags=Edit Tags
+file.editTagsDialog.tip=Select existing file tags or create new tags to describe your files. Each file can have more than one tag.
+file.editTagsDialog.select=File Tags
+file.editTagsDialog.selectedTags=Selected Tags
+file.editTagsDialog.selectedTags.none=No tags selected
+file.editTagsDialog.add=Custom File Tag
+file.editTagsDialog.add.tip=Creating a new tag will add it as a tag option for all files in this dataset.
+file.editTagsDialog.newName=Add new file tag...
+dataset.removeUnusedFileTags.label=Delete Tags
+dataset.removeUnusedFileTags.tip=Select to delete Custom File Tags not used by the files in the dataset.
+dataset.removeUnusedFileTags.check=Delete tags not being used
+file.setThumbnail=Set Thumbnail
+file.setThumbnail.header=Set Dataset Thumbnail
+file.datasetThumbnail=Dataset Thumbnail
+file.datasetThumbnail.tip=Select to use this image as the thumbnail image that is displayed in the search results for this dataset.
+file.setThumbnail.confirmation=Are you sure you want to set this image as your dataset thumbnail? There is already an image uploaded to be the thumbnail and this action will remove it.
+file.useThisIamge=Use this image as the dataset thumbnail image
+file.advancedOptions=Advanced Options
+file.advancedIngestOptions=Advanced Ingest Options
+file.assignedDataverseImage.success={0} has been saved as the thumbnail for this dataset.
+file.assignedTabFileTags.success=The tags were successfully added for {0}.
+file.tabularDataTags=Tabular Data Tags
+file.tabularDataTags.tip=Select a tag to describe the type(s) of data this is (survey, time series, geospatial, etc).
+file.spss-savEncoding=Language Encoding
+file.spss-savEncoding.title=Select the language used for encoding this SPSS (sav) Data file.
+file.spss-savEncoding.current=Current Selection:
+file.spss-porExtraLabels=Variable Labels
+file.spss-porExtraLabels.title=Upload an additional text file with extra variable labels.
+file.spss-porExtraLabels.selectToAddBtn=Select File to Add
+file.ingestFailed.header=Upload Completed with Errors
+file.ingestFailed.message=Tabular data ingest failed.
+file.downloadBtn.format.all=All File Formats + Information
+file.downloadBtn.format.tab=Tab-Delimited
+file.downloadBtn.format.original={0} (Original File Format)
+file.downloadBtn.format.rdata=RData
+file.downloadBtn.format.var=Variable Metadata
+file.downloadBtn.format.citation=Data File Citation
+file.download.filetype.unknown=Original File Format
+file.more.information.link=Link to more file information for
+file.requestAccess=Request Access
+file.requestAccess.dialog.msg=You need to Log In to request access.
+#file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
+#UB
+file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
+
+file.accessRequested=Access Requested
+file.ingestInProgress=Ingest in progress...
+file.dataFilesTab.metadata.header=Metadata
+file.dataFilesTab.metadata.addBtn=Add + Edit Metadata
+file.dataFilesTab.terms.header=Terms
+file.dataFilesTab.terms.editTermsBtn=Edit Terms Requirements
+file.dataFilesTab.terms.list.termsOfUse.header=Terms of Use
+file.dataFilesTab.terms.list.termsOfUse.waiver=Waiver
+file.dataFilesTab.terms.list.termsOfUse.waiver.title=The waiver informs data downloaders how they can use this dataset.
+file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Public Domain Dedication"
+file.cc0.icon.alttxt=Creative Commons CC0 1.0 Public Domain Dedication icon
+file.dataFilesTab.terms.list.termsOfUse.waiver.description=Datasets will default to a CC0 public domain dedication. CC0 facilitates reuse and extensibility of research data. Our Community Norms as well as good scientific practices expect that proper credit is given via citation. If you are unable to give datasets a CC0 waiver, you may enter custom Terms of Use for datasets.
+file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt=No waiver has been selected for this dataset.
+file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description=Our Community Norms as well as good scientific practices expect that proper credit is given via citation. Please use the data citation above, generated by the Dataverse.
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=Yes, apply CC0 - "Public Domain Dedication"
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=No, do not apply CC0 - "Public Domain Dedication"
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip=This is what end users will see displayed on this dataset
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse=Terms of Use
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title=Outlines how this data can be used once downloaded.
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=If you are unable to use CC0 for datasets, you can set custom terms of use. Here is an example of a Data Usage Agreement for datasets that have de-identified human subject data.
+file.dataFilesTab.terms.list.termsOfUse.addInfo=Additional Information
+file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=Confidentiality Declaration
+file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=Indicates whether signing of a confidentiality declaration is needed to access a resource.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=Special Permissions
+file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=Determine if any special permissions are required to access a resource (e.g., if a form is needed and where to access the form).
+file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions=Restrictions
+file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title=Any restrictions on access to or use of the collection, such as privacy certification or distribution restrictions, should be indicated here. These can be restrictions applied by the author, producer, or disseminator of the data collection. If the data are restricted to only a certain class of user, specify which type.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements=Citation Requirements
+file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements.title=Include special/explicit citation requirements for data to be cited properly in articles or other publications that are based on analysis of the data. For standard data citation requirements refer to our Community Norms.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements=Depositor Requirements
+file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements.title=Information regarding user responsibility for informing Dataset Depositors, Authors or Curators of their use of data through providing citations to the published work or providing copies of the manuscripts.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Conditions
+file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=Any additional information that will assist the user in understanding the access and use conditions of the Dataset.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Disclaimer
+file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Information regarding responsibility for uses of the Dataset.
+file.dataFilesTab.terms.list.termsOfAccess.header=Restricted Files + Terms of Access
+file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Restricted Files
+file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=The number of restricted files in this dataset.
+file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=There {0, choice, 0#are|1#is|2#are} {0} restricted {0, choice, 0#files|1#file|2#files} in this dataset.
+file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access
+file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Information on how and if users can gain access to the restricted files in this dataset.
+file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Request Access
+file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=If checked, users can request access to the restricted files in this dataset.
+file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=Users may request access to files.
+file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=Users may not request access to files.
+file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=Enable access request
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace=Data Access Place
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title=If the data are not only in Dataverse, list the location(s) where the data are currently stored.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive=Original Archive
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive.title=Archive from which the data was obtained.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus=Availability Status
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus.title=Statement of Dataset availability. A depositor may need to indicate that a Dataset is unavailable because it is embargoed for a period of time, because it has been superseded, because a new edition is imminent, etc.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess=Contact for Access
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess.title=If different from the Dataset Contact, this is the Contact person or organization (include email or full address, and telephone number if available) that controls access to a collection.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection=Size of Collection
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip=Summary of the number of physical files that exist in a Dataset, recording the number of files that contain data and noting whether the collection contains machine readable documentation and/or other supplementary files and information, such as code, data dictionaries, data definition statements, or data collection instruments.
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion=Study Completion
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title=Relationship of the data collected to the amount of data coded and stored in the Dataset. Information as to why certain items of collected information were not included in the dataset or a specific data file should be provided.
+file.dataFilesTab.terms.list.guestbook=Guestbook
+file.dataFilesTab.terms.list.guestbook.title=User information (i.e., name, email, institution, and position) will be collected when files are downloaded.
+file.dataFilesTab.terms.list.guestbook.noSelected.tip=No guestbook is assigned to this dataset; you will not be prompted to provide any information on file download.
+file.dataFilesTab.terms.list.guestbook.noSelected.admin.tip=There are no guestbooks available in {0} to assign to this dataset.
+file.dataFilesTab.terms.list.guestbook.inUse.tip=The following guestbook will prompt a user to provide additional information when downloading a file.
+file.dataFilesTab.terms.list.guestbook.viewBtn=Preview Guestbook
+file.dataFilesTab.terms.list.guestbook.select.tip=Select a guestbook to have a user provide additional information when downloading a file.
+file.dataFilesTab.terms.list.guestbook.noAvailable.tip=There are no guestbooks enabled in {0}. To create a guestbook, return to {0}, click the "Edit" button and select the "Dataset Guestbooks" option.
+file.dataFilesTab.terms.list.guestbook.clearBtn=Clear Selection
+
+file.dataFilesTab.dataAccess=Data Access
+file.dataFilesTab.dataAccess.info=This data file can be accessed through a terminal window, using the commands below. For more information about downloading and verifying data, see our User Guide.
+file.dataFilesTab.dataAccess.info.draft=Data files cannot be accessed until the dataset draft has been published. For more information about downloading and verifying data, see our User Guide.
+file.dataFilesTab.dataAccess.local.label=Local Access
+file.dataFilesTab.dataAccess.download.label=Download Access
+file.dataFilesTab.dataAccess.verify.label=Verify Data
+file.dataFilesTab.dataAccess.local.tooltip=If this data is locally available to you, this is its file path.
+file.dataFilesTab.dataAccess.download.tooltip=Download this data from your preferred mirror by running this command.
+file.dataFilesTab.dataAccess.verify.tooltip=This command runs a checksum to verify the integrity of the data you have downloaded.
+file.dataFilesTab.button.direct=Direct
+
+file.dataFilesTab.versions=Versions
+file.dataFilesTab.versions.headers.dataset=Dataset
+file.dataFilesTab.versions.headers.summary=Summary
+file.dataFilesTab.versions.headers.contributors=Contributors
+file.dataFilesTab.versions.headers.contributors.withheld=Contributor name(s) withheld
+file.dataFilesTab.versions.headers.published=Published
+file.dataFilesTab.versions.viewDiffBtn=View Differences
+file.dataFilesTab.versions.citationMetadata=Citation Metadata:
+file.dataFilesTab.versions.added=Added
+file.dataFilesTab.versions.removed=Removed
+file.dataFilesTab.versions.changed=Changed
+file.dataFilesTab.versions.replaced=Replaced
+file.dataFilesTab.versions.original=Original
+file.dataFilesTab.versions.replacment=Replacement
+file.dataFilesTab.versions.additionalCitationMetadata=Additional Citation Metadata:
+file.dataFilesTab.versions.description.draft=This is a draft version.
+file.dataFilesTab.versions.description.deaccessioned=Due to the previous version being deaccessioned, there are no difference notes available for this published version.
+file.dataFilesTab.versions.description.firstPublished=This is the first published version.
+file.dataFilesTab.versions.description.deaccessionedReason=Deaccessioned Reason:
+file.dataFilesTab.versions.description.beAccessedAt=The dataset can now be accessed at:
+file.dataFilesTab.versions.viewDetails.btn=View Details
+file.dataFilesTab.versions.widget.viewMoreInfo=To view more information about the versions of this dataset, and to edit it if this is your dataset, please visit the full version of this dataset at the {2}.
+file.dataFilesTab.versions.preloadmessage=(Loading versions...)
+file.previewTab.externalTools.header=Available Previews
+file.previewTab.button.label=Preview
+file.previewTab.previews.not.available=Public previews are not available for this file.
+file.deleteDialog.tip=Are you sure you want to delete this dataset and all of its files? You cannot undelete this dataset.
+file.deleteDialog.header=Delete Dataset
+file.deleteDraftDialog.tip=Are you sure you want to delete this draft version? Files will be reverted to the most recently published version. You cannot undelete this draft.
+file.deleteDraftDialog.header=Delete Draft Version
+file.deleteFileDialog.tip=The file(s) will be deleted after you click on the Save Changes button on the bottom of this page.
+file.deleteFileDialog.immediate=The file will be deleted after you click on the Delete button.
+file.deleteFileDialog.multiple.immediate=The file(s) will be deleted after you click on the Delete button.
+file.deleteFileDialog.header=Delete Files
+file.deleteFileDialog.failed.tip=Files will not be removed from previously published versions of the dataset.
+file.deaccessionDialog.tip=Once you deaccession this dataset it will no longer be viewable by the public.
+file.deaccessionDialog.version=Version
+file.deaccessionDialog.reason.question1=Which version(s) do you want to deaccession?
+file.deaccessionDialog.reason.question2=What is the reason for deaccession?
+file.deaccessionDialog.reason.selectItem.identifiable=There is identifiable data in one or more files.
+file.deaccessionDialog.reason.selectItem.beRetracted=The research article has been retracted.
+file.deaccessionDialog.reason.selectItem.beTransferred=The dataset has been transferred to another repository.
+file.deaccessionDialog.reason.selectItem.IRB=IRB request.
+file.deaccessionDialog.reason.selectItem.legalIssue=Legal issue or Data Usage Agreement.
+file.deaccessionDialog.reason.selectItem.notValid=Not a valid dataset.
+file.deaccessionDialog.reason.selectItem.other=Other (Please type reason in space provided below)
+file.deaccessionDialog.enterInfo=Please enter additional information about the reason for deaccession.
+file.deaccessionDialog.leaveURL=If applicable, please leave a URL where this dataset can be accessed after deaccessioning.
+file.deaccessionDialog.leaveURL.watermark=Optional dataset site, http://...
+file.deaccessionDialog.deaccession.tip=Are you sure you want to deaccession? The selected version(s) will no longer be viewable by the public.
+file.deaccessionDialog.deaccessionDataset.tip=Are you sure you want to deaccession this dataset? It will no longer be viewable by the public.
+file.deaccessionDialog.dialog.selectVersion.error=Please select version(s) for deaccessioning.
+file.deaccessionDialog.dialog.reason.error=Please select reason for deaccessioning.
+file.deaccessionDialog.dialog.url.error=Please enter valid forwarding URL.
+file.deaccessionDialog.dialog.textForReason.error=Please enter text for reason for deaccessioning.
+file.deaccessionDialog.dialog.limitChar.error=Text for reason for deaccessioning may be no longer than {0} characters.
+file.viewDiffDialog.header=Version Differences Details
+file.viewDiffDialog.dialog.warning=Please select two versions to view the differences.
+file.viewDiffDialog.notAvailable=N/A
+file.viewDiffDialog.version=Version
+file.viewDiffDialog.lastUpdated=Last Updated
+file.viewDiffDialog.fileID=File ID
+file.viewDiffDialog.fileName=Name
+file.viewDiffDialog.fileType=Type
+file.viewDiffDialog.fileSize=Size
+file.viewDiffDialog.category=Tags
+file.viewDiffDialog.description=Description
+file.viewDiffDialog.provDescription=Provenance Description
+file.viewDiffDialog.fileReplaced=File Replaced
+file.viewDiffDialog.filesReplaced=File(s) Replaced
+file.viewDiffDialog.files.header=Files
+file.viewDiffDialog.msg.draftFound= This is the "DRAFT" version.
+file.viewDiffDialog.msg.draftNotFound=The "DRAFT" version was not found.
+file.viewDiffDialog.msg.versionFound= This is version "{0}".
+file.viewDiffDialog.msg.versionNotFound=Version "{0}" was not found.
+file.metadataTip=Metadata Tip: After adding the dataset, click the Edit Dataset button to add more metadata.
+file.addBtn=Save Dataset
+file.dataset.allFiles=All Files from this Dataset
+file.downloadDialog.header=Dataset Terms
+file.downloadDialog.tip=Please confirm and/or complete the information needed below in order to continue.
+file.requestAccessTermsDialog.tip=Please confirm and/or complete the information needed below in order to request access to files in this dataset.
+file.requestAccess.notAllowed=Requests for access are not accepted for this dataset.
+file.requestAccess.notAllowed.alreadyHasDownloadPermisssion=User already has permission to download this file. Request Access is invalid.
+
+file.search.placeholder=Search this dataset...
+file.results.filter=Filter by
+file.results.filter.type=File Type:
+file.results.filter.access=Access:
+file.results.filter.tag=File Tag:
+file.results.filter.all=All
+file.results.btn.sort=Sort
+file.results.btn.sort.option.nameAZ=Name (A-Z)
+file.results.btn.sort.option.nameZA=Name (Z-A)
+file.results.btn.sort.option.newest=Newest
+file.results.btn.sort.option.oldest=Oldest
+file.results.btn.sort.option.size=Size
+file.results.btn.sort.option.type=Type
+file.compute.fileAccessDenied=This file is restricted and you may not compute on it because you have not been granted access.
+file.configure.Button=Configure
+
+file.auxfiles.download.header=Download Auxiliary Files
+# These types correspond to the AuxiliaryFile.Type enum.
+file.auxfiles.types.DP=Differentially Private Statistics
+# Add more types here
+file.auxfiles.unspecifiedTypes=Other Auxiliary Files
+
+# dataset-widgets.xhtml
+dataset.widgets.title=Dataset Thumbnail + Widgets
+dataset.widgets.notPublished.why.header=Why Use Widgets?
+dataset.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website.
+dataset.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website.
+dataset.widgets.notPublished.how.header=How To Use Widgets
+dataset.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published.
+dataset.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website.
+dataset.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here.
+dataset.widgets.notPublished.getStarted=To get started, publish your dataset. To learn more about Widgets, visit the Widgets section of the User Guide.
+dataset.widgets.editAdvanced=Edit Advanced Options
+dataset.widgets.editAdvanced.tip=Advanced Options - Additional options for configuring your widget on your personal or project website.
+dataset.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Widgets section of the User Guide.
+dataset.widgets.citation.txt=Dataset Citation
+dataset.widgets.citation.tip=Add a citation for your dataset to your personal or project website.
+dataset.widgets.datasetFull.txt=Dataset
+dataset.widgets.datasetFull.tip=Add a way for visitors on your website to view your datasets, download files, etc.
+dataset.widgets.advanced.popup.header=Widget Advanced Options
+dataset.widgets.advanced.prompt=Forward persistent URLs in your dataset citation to your personal website.
+dataset.widgets.advanced.url.label=Personal Website URL
+dataset.widgets.advanced.url.watermark=http://www.example.com/page-name
+dataset.widgets.advanced.invalid.message=Please enter a valid URL
+dataset.widgets.advanced.success.message=Successfully updated your Personal Website URL
+dataset.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated.
+dataset.thumbnailsAndWidget.breadcrumbs.title=Thumbnail + Widgets
+dataset.thumbnailsAndWidget.thumbnails.title=Thumbnail
+dataset.thumbnailsAndWidget.widgets.title=Widgets
+dataset.thumbnailsAndWidget.thumbnailImage=Thumbnail Image
+dataset.thumbnailsAndWidget.thumbnailImage.title=The logo or image file you wish to display as the thumbnail of this dataset.
+dataset.thumbnailsAndWidget.thumbnailImage.tip=Supported image types are JPG, TIF, or PNG and should be no larger than {0} KB. The maximum display size for an image file as a dataset thumbnail is 48 pixels wide by 48 pixels high.
+dataset.thumbnailsAndWidget.thumbnailImage.default=Default Icon
+dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable=Select Available File
+dataset.thumbnailsAndWidget.thumbnailImage.selectThumbnail=Select Thumbnail
+dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable.title=Select a thumbnail from those available as image data files that belong to your dataset.
+dataset.thumbnailsAndWidget.thumbnailImage.uploadNew=Upload New File
+dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title=Upload an image file as your dataset thumbnail, which will be stored separately from the data files that belong to your dataset.
+dataset.thumbnailsAndWidget.thumbnailImage.upload=Upload Image
+dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file.
+dataset.thumbnailsAndWidget.thumbnailImage.alt=Thumbnail image selected for dataset
+dataset.thumbnailsAndWidget.success=Dataset thumbnail updated.
+dataset.thumbnailsAndWidget.removeThumbnail=Remove Thumbnail
+dataset.thumbnailsAndWidget.removeThumbnail.tip=You are only removing this image as the dataset thumbnail, not removing it from your dataset. To do that, go to the Edit Files page.
+dataset.thumbnailsAndWidget.availableThumbnails=Available Thumbnails
+dataset.thumbnailsAndWidget.availableThumbnails.tip=Select a thumbnail from the data files that belong to your dataset. Continue back to the Thumbnail + Widgets page to save your changes.
+
+# file.xhtml
+file.share.fileShare=Share File
+file.share.fileShare.tip=Share this file on your favorite social media networks.
+file.share.fileShare.shareText=View this file.
+file.title.label=Title
+file.citation.label=Citation
+file.citation.notice=This file is part of "{0}".
+file.citation.dataset=Dataset Citation
+file.citation.datafile=File Citation
+file.cite.downloadBtn=Cite Dataset
+file.cite.file.downloadBtn=Cite Data File
+file.pid.label=File Persistent ID:
+file.unf.lable= File UNF:
+file.general.metadata.label=General Metadata
+file.description.label=Description
+file.tags.label=Tags
+file.lastupdated.label=Last Updated
+file.DatasetVersion=Version
+
+file.previewTab.tool.open=Open
+file.previewTab.header=Preview
+file.previewTab.presentation=File Preview Tool
+file.previewTab.openBtn=Open in New Window
+file.previewTab.exploreBtn={0} on {1}
+file.metadataTab.fileMetadata.header=File Metadata
+file.metadataTab.fileMetadata.persistentid.label=Data File Persistent ID
+file.metadataTab.fileMetadata.downloadUrl.label=Download URL
+file.metadataTab.fileMetadata.downloadUrl.info=Use the Download URL in a Wget command or a download manager to avoid interrupted downloads, timeouts, or other failures. User Guide - Downloading via URL
+file.metadataTab.fileMetadata.unf.label=File UNF
+file.metadataTab.fileMetadata.size.label=Size
+file.metadataTab.fileMetadata.type.label=Type
+file.metadataTab.fileMetadata.description.label=Description
+file.metadataTab.fileMetadata.publicationDate.label=Publication Date
+file.metadataTab.fileMetadata.depositDate.label=Deposit Date
+file.metadataTab.fileMetadata.hierarchy.label=File Path
+file.metadataTab.fileMetadata.hierarchy.tip=Hierarchical directory structure path used to display file organization and support reproducibility.
+file.metadataTab.fitsMetadata.header=FITS Metadata
+
+file.versionDifferences.noChanges=No changes associated with this version
+file.versionDifferences.fileNotInVersion=File not included in this version
+file.versionDifferences.actionChanged=Changed
+file.versionDifferences.actionAdded=Added
+file.versionDifferences.actionRemoved=Removed
+file.versionDifferences.actionReplaced=Replaced
+file.versionDifferences.fileMetadataGroupTitle=File Metadata
+file.versionDifferences.fileTagsGroupTitle=File Tags
+file.versionDifferences.descriptionDetailTitle=Description
+file.versionDifferences.provenanceDetailTitle=Provenance
+file.versionDifferences.fileNameDetailTitle=File Name
+file.versionDifferences.fileAccessTitle=File Access
+file.versionDifferences.fileRestricted=Restricted
+file.versionDifferences.fileUnrestricted=Unrestricted
+file.versionDifferences.fileGroupTitle=File
+
+file.anonymized.authorsWithheld=Author name(s) withheld
+# File Ingest
+ingest.csv.invalidHeader=Invalid header row. One of the cells is empty.
+ingest.csv.lineMismatch=Mismatch between line counts in the first and final passes: {0} found on the first pass, but {1} found on the second.
+ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found.
+ingest.csv.nullStream=Stream can't be null.
+
+# editdatafile.xhtml
+
+# editFilesFragment.xhtml
+file.edit.error.file_exceeds_limit=This file exceeds the size limit.
+# File metadata error
+file.metadata.datafiletag.not_tabular=You cannot add Tabular Data Tags to a non-tabular file.
+file.metadata.filedirectory.invalidCharacters=Directory Name cannot contain invalid characters. Valid characters are a-z, A-Z, 0-9, '_', '-', '.', '\\', '/' and ' ' (white space).
+
+# File Edit Success
+file.message.editSuccess=The file has been updated.
+file.message.deleteSuccess=The file has been deleted.
+file.message.replaceSuccess=The file has been replaced.
+
+# File Add/Replace operation messages
+file.addreplace.file_size_ok=File size is in range.
+file.addreplace.error.byte_abrev=B
+file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}.
+file.addreplace.error.dataset_is_null=The dataset cannot be null.
+file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null.
+file.addreplace.error.parsing=Error parsing the provided JSON.
+file.addreplace.warning.unzip.failed=Failed to unzip the file. Saving the file as is.
+file.addreplace.warning.unzip.failed.size=A file contained in this zip file exceeds the size limit of {0}. This Dataverse installation will save and display the zipped file, rather than unpacking and displaying files.
+find.dataset.error.dataset_id_is_null=When accessing a dataset based on Persistent ID, a {0} query parameter must be present.
+find.dataset.error.dataset.not.found.persistentId=Dataset with Persistent ID {0} not found.
+find.dataset.error.dataset.not.found.id=Dataset with ID {0} not found.
+find.dataset.error.dataset.not.found.bad.id=Bad dataset ID number: {0}.
+find.datasetlinking.error.not.found.ids=Dataset linking dataverse with dataset ID {0} and dataset linking dataverse ID {1} not found.
+find.datasetlinking.error.not.found.bad.ids=Bad dataset ID number: {0} or dataset linking dataverse ID number: {1}.
+find.dataverselinking.error.not.found.ids=Dataverse linking dataverse with dataverse ID {0} and dataverse linking dataverse ID {1} not found.
+find.dataverselinking.error.not.found.bad.ids=Bad dataverse ID number: {0} or dataverse linking dataverse ID number: {1}.
+find.datafile.error.datafile.not.found.id=File with ID {0} not found.
+find.datafile.error.datafile.not.found.bad.id=Bad file ID number: {0}.
+find.datafile.error.dataset.not.found.persistentId=Datafile with Persistent ID {0} not found.
+find.dataverse.role.error.role.not.found.id=Dataverse Role with ID {0} not found.
+find.dataverse.role.error.role.not.found.bad.id=Bad Dataverse Role ID number: {0}
+find.dataverse.role.error.role.not.found.alias=Dataverse Role with alias {0} not found.
+find.dataverse.role.error.role.builtin.not.allowed=May not delete Built In Role {0}.
+file.addreplace.error.dataset_id_not_found=There was no dataset found for ID:
+file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset.
+file.addreplace.error.filename_undetermined=The file name cannot be determined.
+file.addreplace.error.file_content_type_undetermined=The file content type cannot be determined.
+file.addreplace.error.file_upload_failed=The file upload failed.
+file.addreplace.warning.duplicate_file=This file has the same content as {0} that is in the dataset.
+file.addreplace.error.duplicate_file.continue=You may delete it if it was not intentional.
+file.addreplace.error.existing_file_to_replace_id_is_null=The ID of the existing file to replace must be provided.
+file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file not found. There was no file found for ID: {0}
+file.addreplace.error.existing_file_to_replace_is_null=The file to replace cannot be null.
+file.addreplace.error.existing_file_to_replace_not_in_dataset=The file to replace does not belong to this dataset.
+file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published dataset. (The file is unpublished or was deleted from a previous version.)
+file.addreplace.content_type.header=File Type Different
+file.addreplace.already_exists.header=Duplicate File Uploaded
+file.addreplace.already_exists.header.multiple=Duplicate Files Uploaded
+file.addreplace.error.replace.new_file_has_different_content_type=The original file ({0}) and replacement file ({1}) are different file types.
+file.addreplace.error.replace.new_file_same_as_replacement=Error! You may not replace a file with a file that has duplicate content.
+file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it.
+file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file.
+file.addreplace.error.initial_file_list_empty=An error occurred and the new file was not added.
+file.addreplace.error.initial_file_list_more_than_one=You cannot replace a single file with multiple files. The file you uploaded was ingested into multiple files.
+file.addreplace.error.final_file_list_empty=There are no files to add. (This error should not happen if the steps are called in sequence.)
+file.addreplace.error.only_replace_operation=This should only be called for file replace operations!
+file.addreplace.error.failed_to_remove_old_file_from_dataset=Unable to remove old file from new DatasetVersion.
+file.addreplace.error.add.add_file_error=Failed to add file to dataset.
+file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset - no new files found.
+file.addreplace.success.add=File successfully added!
+file.addreplace.success.replace=File successfully replaced!
+file.addreplace.error.auth=The API key is invalid.
+file.addreplace.error.invalid_datafile_tag=Not a valid Tabular Data Tag:
+
+
+
+# 500.xhtml
+error.500.page.title=500 Internal Server Error
+error.500.message=Internal Server Error - An unexpected error was encountered; no more information is available.
+
+# 404.xhtml
+error.404.page.title=404 Not Found
+error.404.message=Page Not Found - The page you are looking for was not found.
+
+# 403.xhtml
+error.403.page.title=403 Not Authorized
+error.403.message=Not Authorized - You are not authorized to view this page.
+
+# general error - support message
+error.support.message= If you believe this is an error, please contact {0} for assistance.
+
+# citation-frame.xhtml
+citationFrame.banner.message=If the site below does not load, the archived data can be found in the {0} {1}. {2}
+citationFrame.banner.message.here=here
+citationFrame.banner.closeIcon=Close this message, go to dataset
+citationFrame.banner.countdownMessage= This message will close in
+citationFrame.banner.countdownMessage.seconds=seconds
+
+# Friendly AuthenticationProvider names
+authenticationProvider.name.builtin=Dataverse
+authenticationProvider.name.null=(provider is unknown)
+authenticationProvider.name.github=GitHub
+authenticationProvider.name.google=Google
+authenticationProvider.name.orcid=ORCiD
+authenticationProvider.name.orcid-sandbox=ORCiD Sandbox
+authenticationProvider.name.shib=Shibboleth
+
+#file-edit-popup-fragment.xhtml #editFilesFragment.xhtml
+dataset.access.accessHeader=Restrict Files and Add Dataset Terms of Access
+dataset.access.description=Restricting limits access to published files. You can add or edit Terms of Access for the dataset, and allow people to Request Access to restricted files.
+
+#datasetFieldForEditFragment.xhtml
+dataset.AddReplication=Add "Replication Data for" to Title
+dataset.replicationDataFor=Replication Data for:
+
+
+#mydata_fragment.xhtml
+mydataFragment.infoAccess=Here are all the dataverses, datasets, and files you have access to. You can filter through them by publication status and roles.
+mydataFragment.moreResults=View More Results
+mydataFragment.publicationStatus=Publication Status
+mydataFragment.roles=Roles
+mydataFragment.resultsByUserName=Results by Username
+mydataFragment.search=Search my data...
+mydata.result=Result
+mydata.results=Results
+mydata.viewnext=View Next
+mydata.more=More
+
+file.provenance=Provenance
+file.editProvenanceDialog=Provenance
+file.editProvenanceDialog.tip=Provenance is a record of the origin of your data file and any transformations it has been through. Upload a JSON file from a provenance capture tool to generate a graph of your data''s provenance. For more information, please refer to our User Guide.
+file.editProvenanceDialog.uploadSuccess=Upload complete
+file.editProvenanceDialog.uploadError=An error occurred during upload and parsing of your provenance file.
+file.editProvenanceDialog.noEntitiesError=The uploaded provenance file does not contain any entities that can be related to your Data File.
+file.editProvenanceDialog.invalidSchemaError=The uploaded provenance file does not comply with the W3C Provenance standard.
+file.editProvenanceDialog.bundleFile=Provenance File
+file.editProvenanceDialog.bundleFile.instructions=File must be JSON format and follow the W3C standard.
+file.editProvenanceDialog.bundleFile.alreadyPublished=This Provenance File has been published and cannot be replaced or removed.
+file.editProvenanceDialog.bundleEntity=Data File Entity
+file.editProvenanceDialog.bundleEntity.placeholder=Connect entity...
+file.editProvenanceDialog.bundleEntity.requiredValidation=Value is required.
+file.editProvenanceDialog.bundleEntity.tip=Select the entity in your provenance file which represents your data file.
+file.editProvenanceDialog.bundleEntity.nameHeader=Name
+file.editProvenanceDialog.bundleEntity.typeHeader=Type
+file.editProvenanceDialog.bundleEntity.entityHeader=Entity
+file.editProvenanceDialog.selectToAddBtn=Select File
+file.editProvenanceDialog.description.tip=You may also add information documenting the history of your data file, including how it was created, how it has changed, and who has worked with it.
+file.editProvenanceDialog.description=Provenance Description
+file.editProvenanceDialog.description.placeholder=Add provenance description...
+file.confirmProvenanceDialog=Provenance
+file.confirmProvenanceDialog.tip1=Once you publish this dataset, your provenance file cannot be edited or replaced.
+file.confirmProvenanceDialog.tip2=Select "Cancel" to return to the previous page, where you can preview your provenance file to confirm it is correct.
+file.metadataTab.provenance.header=File Provenance
+file.metadataTab.provenance.body=File Provenance information coming in a later release...
+file.metadataTab.provenance.error=Due to an internal error, your provenance information was not correctly saved.
+file.metadataTab.provenance.message=Your provenance information has been received. Please click Save Changes below to ensure all data is added to your dataset.
+
+file.provConfirm.unpublished.json=Your Provenance File will become permanent upon publishing your dataset. Please preview to confirm before publishing.
+file.provConfirm.published.json=Your Provenance File will become permanent once you click Save Changes. Please preview to confirm before you Save Changes.
+file.provConfirm.freeform=Your Provenance Description is not permanent; it can be updated at any time.
+file.provConfirm.empty=No changes have been made.
+
+file.provAlert.published.json=Your Provenance File changes have been saved to the Dataset.
+file.provAlert.unpublished.json=Your Provenance File changes will be saved to this version of the Dataset once you click on the Save Changes button.
+file.provAlert.freeform=Your Provenance Description changes will be saved to this version of the Dataset once you click on the Save Changes button.
+file.provAlert.filePage.published.json=Your Provenance File changes have been saved to the Dataset.
+file.provAlert.filePage.unpublished.json=Your Provenance File changes have been saved to this version of the Dataset.
+file.provAlert.filePage.freeform=Your Provenance Description changes have been saved to this version of the Dataset.
+
+api.prov.provJsonSaved=PROV-JSON provenance data saved for Data File:
+api.prov.provJsonDeleted=PROV-JSON deleted for the selected Data File.
+
+api.prov.error.provDisabled=This functionality has been administratively disabled.
+api.prov.error.badDataFileId=Invalid DataFile ID.
+api.prov.error.jsonUpdateNotAllowed=PROV-JSON cannot be updated for a published file that already has PROV-JSON.
+api.prov.error.entityMismatch=Entity name provided does not match any entities parsed from the uploaded PROV-JSON.
+api.prov.error.jsonDeleteNotAllowed=PROV-JSON cannot be deleted for a published file.
+api.prov.error.jsonNoContent=No provenance json available for this file.
+api.prov.error.freeformInvalidJson=A valid JSON object could not be found.
+api.prov.error.freeformMissingJsonKey=The JSON object you send must have a key called 'text'.
+api.prov.error.freeformNoText=No provenance free form text available for this file.
+api.prov.error.noDataFileFound=Could not find a file based on ID.
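
Editor's note: the api.prov.* messages above are returned by the provenance endpoints of the native API. As a hedged illustration only (not part of this patch), updating the free-form provenance description of a file might look like the sketch below; the server URL, token, and file id are placeholders, and the endpoint path is assumed from the documented native API.

```bash
#!/bin/bash
# Hypothetical sketch: setting a free-form provenance description for a file.
# SERVER_URL, API_TOKEN and FILE_ID are placeholders.
SERVER_URL=https://demo.dataverse.org
API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
FILE_ID=1234

# The body must be a JSON object with a 'text' key, otherwise the
# api.prov.error.freeformMissingJsonKey message above is returned.
# If the provenance feature is disabled, api.prov.error.provDisabled applies.
curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-Type:application/json" \
  -X POST "$SERVER_URL/api/files/$FILE_ID/prov-freeform" \
  -d '{"text":"Derived from survey wave 2, cleaned with script clean.R"}'
```
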
+
+bagit.sourceOrganization=Dataverse Installation ()
+bagit.sourceOrganizationAddress=
+bagit.sourceOrganizationEmail=
+
+#Permission.java
+permission.addDataverseDataverse=Add a dataverse within another dataverse
+permission.deleteDataset=Delete a dataset draft
+permission.deleteDataverse=Delete an unpublished dataverse
+permission.publishDataset=Publish a dataset
+permission.publishDataverse=Publish a dataverse
+permission.managePermissionsDataset=Manage permissions for a dataset
+permission.managePermissionsDataverse=Manage permissions for a dataverse
+permission.editDataset=Edit a dataset's metadata
+permission.editDataverse=Edit a dataverse's metadata, facets, customization, and templates
+permission.downloadFile=Download a file
+permission.viewUnpublishedDataset=View an unpublished dataset and its files
+permission.viewUnpublishedDataverse=View an unpublished dataverse
+permission.addDatasetDataverse=Add a dataset to a dataverse
+
+#DataverseUserPage.java
+userPage.informationUpdated=Your account information has been successfully updated.
+userPage.passwordChanged=Your account password has been successfully changed.
+confirmEmail.changed=Your email address has changed and must be re-verified. Please check your inbox at {0} and follow the link we''ve sent. \n\nAlso, please note that the link will only work for the next {1} before it has expired.
+
+#Dataset.java
+dataset.category.documentation=Documentation
+dataset.category.data=Data
+dataset.category.code=Code
+
+#DatasetVersionDifference.java
+dataset.version.file.added=Files (Added: {0}
+dataset.version.file.removed=Files (Removed: {0}
+dataset.version.file.removed2=; Removed: {0}
+dataset.version.file.replaced=Files (Replaced: {0}
+dataset.version.file.replaced2=; Replaced: {0}
+dataset.version.file.changed=Files (Changed File Metadata: {0}
+dataset.version.file.changed2=; Changed File Metadata: {0}
+dataset.version.variablemetadata.changed=Variable Metadata (Changed Variable Metadata: {0}
+dataset.version.variablemetadata.changed2=; Changed Variable Metadata: {0}
+
+#DataversePage.java
+dataverse.item.required=Required
+dataverse.item.required.conditional=Conditionally Required
+dataverse.item.optional=Optional
+dataverse.item.hidden=Hidden
+dataverse.edit.msg=Edit Dataverse
+dataverse.edit.detailmsg=Edit your dataverse and click Save Changes. Asterisks indicate required fields.
+dataverse.feature.update=The featured dataverses for this dataverse have been updated.
+dataverse.link.select=You must select a linking dataverse.
+dataset.noSelectedDataverse.header=Select Dataverse(s)
+dataverse.link.user=Only authenticated users can link a dataverse.
+dataverse.link.error=Unable to link {0} to {1}. An internal error occurred.
+dataverse.search.user=Only authenticated users can save a search.
+dataverse.alias=alias
+dataverse.alias.taken=This Alias is already taken.
+
+#editDatafilesPage.java
+dataset.save.fail=Dataset Save Failed
+
+dataset.files.exist=Files {0} have the same content as {1} that already exists in the dataset.
+dataset.file.exist=File {0} has the same content as {1} that already exists in the dataset.
+dataset.file.exist.test={0, choice, 1#File |2#Files |} {1} {0, choice, 1#has |2#have |} the same content as {2} that already {0, choice, 1#exists |2#exist |}in the dataset.
+dataset.files.duplicate=Files {0} have the same content as {1} that have already been uploaded.
+dataset.file.duplicate=File {0} has the same content as {1} that has already been uploaded.
+dataset.file.inline.message= This file has the same content as {0}.
+dataset.file.upload={0} uploaded successfully.
+dataset.file.upload.setUp.rsync.failed=Rsync upload setup failed!
+dataset.file.upload.setUp.rsync.failed.detail=Unable to find appropriate storage driver.
+dataset.file.uploadFailure=Upload failure
+dataset.file.uploadFailure.detailmsg=The file {0} failed to upload.
+dataset.file.uploadWarning=Upload warning
+dataset.file.uploadWorked=Upload complete
+dataset.file.upload.popup.explanation.tip=For more information, please refer to the Duplicate Files section of the User Guide.
+
+#EmailValidator.java
+email.invalid=is not a valid email address.
+
+#URLValidator.java
+url.invalid=is not a valid URL.
+
+#HarvestingClientsPage.java
+harvest.start.error=Sorry, harvest could not be started for the selected harvesting client configuration (unknown server error).
+harvest.delete.error=Selected harvesting client cannot be deleted; unknown exception:
+harvest.create.error=Failed to create a new Harvesting Client configuration: no destination dataverse selected.
+harvest.createCommand.error=Harvesting client creation command failed
+harvest.create.fail=Harvesting client creation failed (reason unknown).
+harvest.update.success=Successfully updated harvesting client
+harvest.save.failure1=Failed to save harvesting client
+harvest.save.failure2=Failed to save harvesting client (reason unknown).
+
+#HarvestingSetsPage.java
+harvest.oaicreate.fail=Failed to create OAI set
+harvest.oaicreate.defaultset.fail=Failed to create the default OAI set
+harvest.oaiupdate.fail=Failed to update OAI set.
+harvest.oaiupdate.success=Successfully updated OAI set "{0}".
+harvest.delete.fail=Failed to delete harvesting set; unknown exception:
+harvest.reexport.fail=Sorry, could not start re-export on selected OAI set (unknown server error).
+harvest.search.failed=Search failed for the query provided. Message from the Dataverse search server:
+
+#LoginPage.java
+login.Username/Email=Please enter a Username
+login.Password=Please enter a Password
+
+#SystemConfig.java
+system.app.terms=There are no Terms of Use for this Dataverse installation.
+system.api.terms=There are no API Terms of Use for this Dataverse installation.
+
+#DatasetPage.java
+dataverse.notreleased=DataverseNotReleased
+dataverse.release.authenticatedUsersOnly=Only authenticated users can release a dataverse.
+dataset.registration.failed=Dataset Registration Failed
+dataset.registered=DatasetRegistered
+dataset.registered.msg=Your dataset is now registered.
+dataset.notlinked=DatasetNotLinked
+dataset.notlinked.msg=There was a problem linking this dataset to yours:
+datasetversion.archive.success=Archival copy of Version successfully submitted
+datasetversion.archive.failure=Error in submitting an archival copy
+datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version.
+datasetversion.update.archive.failure=Dataset Version Update succeeded, but the attempt to update the archival copy failed.
+datasetversion.update.success=The published version of your Dataset has been updated.
+datasetversion.update.archive.success=The published version of your Dataset, and its archival copy, have been updated.
+
+#ThemeWidgetFragment.java
+theme.validateTagline=Tagline must be at most 140 characters.
+theme.urlValidate=URL validation failed.
+theme.urlValidate.msg=Please provide a URL.
+dataverse.save.failed=Dataverse Save Failed -
+
+#LinkValidator.java
+link.tagline.validate=Please enter a tagline for the website to be hyperlinked with.
+
+#TemplatePage.java
+template.save.fail=Template Save Failed
+template.create=Template has been created.
+template.save=Template has been edited and saved.
+
+#GuestbookPage.java
+guestbook.save.fail=Guestbook Save Failed
+guestbook.option.msg= - An Option question requires multiple options. Please complete before saving.
+guestbook.create=The guestbook has been created.
+guestbook.save=The guestbook has been edited and saved.
+
+#Shib.java
+shib.invalidEmailAddress=The SAML assertion contained an invalid email address: "{0}".
+shib.emailAddress.error=A single valid address could not be found.
+shib.nullerror=The SAML assertion for "{0}" was null. Please contact support.
+dataverse.shib.success=Your Dataverse account is now associated with your institutional account.
+shib.convert.fail.deactivated=Your existing account cannot be converted because it has been deactivated.
+shib.createUser.fail=Couldn't create user.
+shib.duplicate.email.error=Cannot log in because the email address associated with this account has changed since the previous login and is already in use by another account.
+
+#IngestServiceBean.java
+ingest.failed=ingest failed
+
+#ManagePermissionsPage.java
+permission.roleWasRemoved={0} role for {1} was removed.
+permission.defaultPermissionDataverseUpdated=The default permissions for this dataverse have been updated.
+permission.roleAssignedToFor={0} role assigned to {1} for {2}.
+permission.roleNotAssignedFor={0} role could NOT be assigned to {1} for {2}.
+permission.updated=updated
+permission.created=created
+permission.roleWas=The role was {0}. To assign it to a user and/or group, click on the Assign Roles to Users/Groups button in the Users/Groups section of this page.
+permission.roleNotSaved=The role was not able to be saved.
+permission.permissionsMissing=Permissions {0} missing.
+permission.CannotAssigntDefaultPermissions=Cannot assign default permissions.
+permission.default.contributor.role.none.decription=A person who has no permissions on a newly created dataset. Not recommended for dataverses with human contributors.
+permission.default.contributor.role.none.name=None
+permission.role.must.be.created.by.superuser=Roles can only be created or edited by superusers.
+permission.role.not.created.alias.already.exists=Role with this alias already exists.
+
+#ManageFilePermissionsPage.java
+permission.roleNotAbleToBeRemoved=The role assignment was not able to be removed.
+permission.fileAccessGranted=File Access request by {0} was granted.
+permission.fileAccessRejected=File Access request by {0} was rejected.
+permission.roleNotAbleToBeAssigned=The role was not able to be assigned.
+
+#ManageGroupsPage.java
+dataverse.manageGroups.create.success=Successfully created group {0}. Refresh to update your page.
+dataverse.manageGroups.save.success=Successfully saved group {0}.
+dataverse.manageGroups.delete=The group has been deleted.
+dataverse.manageGroups.nodelete=The explicit group cannot be deleted.
+dataverse.manageGroups.create.fail=Group Creation failed.
+dataverse.manageGroups.edit.fail=Group edit failed.
+dataverse.manageGroups.save.fail=Group Save failed.
+
+#ManageTemplatesPage.java
+template.makeDefault=The template has been selected as the default template for this dataverse
+template.unselectDefault=The template has been removed as the default template for this dataverse
+template.clone=The template has been copied
+template.clone.error=Template could not be copied.
+template.delete=The template has been deleted
+template.delete.error=The dataset template cannot be deleted.
+template.update=Template data updated
+template.update.error=Template update failed
+template.makeDefault.error=The dataset template cannot be made default.
+page.copy=Copy of
+
+#RolePermissionFragment.java
+permission.roleAssignedToOn=Role {0} assigned to {1} on {2}
+permission.cannotAssignRole=Can''t assign role: {0}
+permission.roleRevoked=Role assignment revoked successfully
+permission.cannotRevokeRole1=Cannot revoke role assignment - you''re missing permission {0}
+permission.cannotRevokeRole2=Cannot revoke role assignment: {0}
+permission.roleSave=Role "{0}" saved
+permission.cannotSaveRole=Cannot save role {0}
+
+#GlobalId.java
+pid.allowedCharacters=^[A-Za-z0-9._/:\\-]*
+
+#General Command Exception
+command.exception.only.superusers={1} can only be called by superusers.
+command.exception.user.deactivated={0} failed: User account has been deactivated.
+command.exception.user.deleted={0} failed: User account has been deleted.
+
+#Admin-API
+admin.api.auth.mustBeSuperUser=Forbidden. You must be a superuser.
+admin.api.migrateHDL.failure.must.be.set.for.doi=May not migrate while the installation protocol is set to "hdl". Protocol must be "doi".
+admin.api.migrateHDL.failure.must.be.hdl.dataset=Dataset was not registered as an HDL. It cannot be migrated.
+admin.api.migrateHDL.success=Dataset migrate HDL registration complete. Dataset re-registered successfully.
+admin.api.migrateHDL.failure=Failed to migrate Dataset Handle id: {0}
+admin.api.migrateHDL.failureWithException=Failed to migrate Dataset Handle id: {0} Unexpected exception: {1}
+admin.api.deleteUser.failure.prefix=Could not delete Authenticated User {0} because
+admin.api.deleteUser.failure.dvobjects= the user has created Dataverse object(s)
+admin.api.deleteUser.failure.gbResps= the user is associated with file download (Guestbook Response) record(s)
+admin.api.deleteUser.failure.roleAssignments=the user is associated with role assignment record(s)
+admin.api.deleteUser.failure.versionUser=the user has contributed to dataset version(s)
+admin.api.deleteUser.failure.savedSearches=the user has created saved searches
+admin.api.deleteUser.success=Authenticated User {0} deleted.
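
Editor's note: the admin.api.deleteUser.* messages above come from the superuser-only Admin API. Purely for orientation (not part of this patch), a deletion call might look like the sketch below; the username is a placeholder, and the assumption that the Admin API is only reachable from localhost reflects a typical, not guaranteed, deployment.

```bash
#!/bin/bash
# Hypothetical sketch: deleting an authenticated user via the Admin API.
# USERNAME is a placeholder; the Admin API is usually blocked except on localhost.
SERVER_URL=http://localhost:8080
USERNAME=jdoe

# On success the API answers with admin.api.deleteUser.success; otherwise one
# of the admin.api.deleteUser.failure.* messages explains which records
# (dataverse objects, guestbook responses, role assignments, ...) still
# reference the account and block deletion.
curl -X DELETE "$SERVER_URL/api/admin/authenticatedUsers/$USERNAME"
```
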
+
+#Files.java
+files.api.metadata.update.duplicateFile=Filename already exists at {0}
+
+#Datasets.java
+datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset.
+datasets.api.updatePIDMetadata.auth.mustBeSuperUser=Forbidden. You must be a superuser.
+datasets.api.updatePIDMetadata.success.for.single.dataset=Dataset {0} PID Metadata updated successfully.
+datasets.api.updatePIDMetadata.success.for.update.all=All Dataset PID Metadata update completed successfully.
+datasets.api.moveDataset.error.targetDataverseNotFound=Target dataverse not found.
+datasets.api.moveDataset.error.suggestForce=Use the query parameter forceMove=true to complete the move.
+datasets.api.moveDataset.success=Dataset moved successfully.
+datasets.api.listing.error=Fatal error trying to list the contents of the dataset. Please report this error to the Dataverse administrator.
+datasets.api.datasize.storage=Total size of the files stored in this dataset: {0} bytes
+datasets.api.datasize.download=Total size of the files available for download in this version of the dataset: {0} bytes
+datasets.api.datasize.ioerror=Fatal IO error while trying to determine the total size of the files stored in the dataset. Please report this error to the Dataverse administrator.
+datasets.api.grant.role.not.found.error=Cannot find role named ''{0}'' in dataverse {1}
+datasets.api.grant.role.cant.create.assignment.error=Cannot create assignment: {0}
+datasets.api.grant.role.assignee.not.found.error=Assignee not found
+datasets.api.revoke.role.not.found.error="Role assignment {0} not found"
+datasets.api.revoke.role.success=Role {0} revoked for assignee {1} in {2}
+datasets.api.privateurl.error.datasetnotfound=Could not find dataset.
+datasets.api.privateurl.error.alreadyexists=Private URL already exists for this dataset.
+datasets.api.privateurl.error.notdraft=Can't create Private URL because the latest version of this dataset is not a draft.
+datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymized access because this dataset has been published.
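
Editor's note: the datasets.api.privateurl.* messages above correspond to the Private URL endpoint of the native API. As an illustrative sketch only (not part of this distro), creating a Private URL for a draft dataset could look like this; the server URL, API token, and dataset id are placeholders.

```bash
#!/bin/bash
# Hypothetical sketch: creating a Private URL for an unpublished (draft) dataset.
# SERVER_URL, API_TOKEN and DATASET_ID are placeholders.
SERVER_URL=https://demo.dataverse.org
API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
DATASET_ID=42

# Fails with datasets.api.privateurl.error.notdraft when the latest version is
# not a draft, and with datasets.api.privateurl.error.alreadyexists when a
# Private URL has already been created for this dataset.
curl -H "X-Dataverse-key:$API_TOKEN" -X POST \
  "$SERVER_URL/api/datasets/$DATASET_ID/privateUrl"
```
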
+
+
+#Dataverses.java
+dataverses.api.update.default.contributor.role.failure.role.not.found=Role {0} not found.
+dataverses.api.update.default.contributor.role.success=Default contributor role for Dataverse {0} has been set to {1}.
+dataverses.api.update.default.contributor.role.failure.role.does.not.have.dataset.permissions=Role {0} does not have dataset permissions.
+dataverses.api.move.dataverse.failure.descendent=Can't move a dataverse to its descendant
+dataverses.api.move.dataverse.failure.already.member=Dataverse already in this dataverse
+dataverses.api.move.dataverse.failure.itself=Cannot move a dataverse into itself
+dataverses.api.move.dataverse.failure.not.published=Published dataverse may not be moved to unpublished dataverse. You may publish {1} and re-try the move.
+dataverses.api.move.dataverse.error.guestbook=Dataset guestbook is not in target dataverse.
+dataverses.api.move.dataverse.error.template=Dataverse template is not in target dataverse.
+dataverses.api.move.dataverse.error.featured=Dataverse is featured in current dataverse.
+dataverses.api.move.dataverse.error.metadataBlock=Dataverse metadata block is not in target dataverse.
+dataverses.api.move.dataverse.error.dataverseLink=Dataverse is linked to target dataverse or one of its parents.
+dataverses.api.move.dataverse.error.datasetLink=Dataset is linked to target dataverse or one of its parents.
+dataverses.api.move.dataverse.error.forceMove=Please use the parameter ?forceMove=true to complete the move. This will remove anything from the dataverse that is not compatible with the target dataverse.
+
+#Access.java
+access.api.allowRequests.failure.noDataset=Could not find Dataset with id: {0}
+access.api.allowRequests.failure.noSave=Problem saving dataset {0}: {1}
+access.api.allowRequests.allows=allows
+access.api.allowRequests.disallows=disallows
+access.api.allowRequests.success=Dataset {0} {1} file access requests.
+access.api.fileAccess.failure.noUser=Could not find user to execute command: {0}
+access.api.requestAccess.failure.commandError=Problem trying to request access on {0}: {1}
+access.api.requestAccess.failure.requestExists=An access request for this file on your behalf already exists.
+access.api.requestAccess.failure.invalidRequest=You may not request access to this file. It may already be available to you.
+access.api.requestAccess.noKey=You must provide a key to request access to a file.
+access.api.requestAccess.fileNotFound=Could not find datafile with id {0}.
+access.api.requestAccess.invalidRequest=This file is already available to you for download or you have a pending request
+access.api.requestAccess.requestsNotAccepted=Requests for access are not accepted on the Dataset.
+access.api.requestAccess.success.for.single.file=Access to File {0} requested.
+access.api.rejectAccess.failure.noPermissions=Requestor does not have permission to manage file download requests.
+access.api.grantAccess.success.for.single.file=Access to File {0} granted.
+access.api.grantAccess.noAssigneeFound=Could not find assignee with identifier {0}.
+access.api.grantAccess.failure.commandError=Problem trying to grant access on {0}: {1}
+access.api.fileAccess.rejectFailure.noRequest=No request for access to file {0} for user {1}
+access.api.rejectAccess.success.for.single.file=Access to File {0} rejected.
+access.api.revokeAccess.noRoleFound=No File Downloader role found for user {0}
+access.api.revokeAccess.success.for.single.file=File Downloader access has been revoked for user {0} on file {1}
+access.api.requestList.fileNotFound=Could not find datafile with id {0}.
+access.api.requestList.noKey=You must provide a key to get list of access requests for a file.
+access.api.requestList.noRequestsFound=There are no access requests for this file {0}.
+access.api.exception.metadata.not.available.for.nontabular.file=This type of metadata is only available for tabular files.
+access.api.exception.metadata.restricted.no.permission=You do not have permission to download this file.
+access.api.exception.version.not.found=Could not find requested dataset version.
+access.api.exception.dataset.not.found=Could not find requested dataset.
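
Editor's note: the access.api.requestAccess.* strings above surface through the Access API when a user asks for a restricted file. A minimal sketch, assuming the documented request-access endpoint and placeholder values (not taken from this patch):

```bash
#!/bin/bash
# Hypothetical sketch: requesting access to a restricted file.
# SERVER_URL, API_TOKEN and FILE_ID are placeholders.
SERVER_URL=https://demo.dataverse.org
API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
FILE_ID=1234

# Returns access.api.requestAccess.success.for.single.file on success,
# access.api.requestAccess.failure.requestExists if a request is already
# pending, and ...invalidRequest if the file is already available to you.
curl -H "X-Dataverse-key:$API_TOKEN" -X PUT \
  "$SERVER_URL/api/access/datafile/$FILE_ID/requestAccess"
```
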
+
+#permission
+permission.AddDataverse.label=AddDataverse
+permission.AddDataset.label=AddDataset
+permission.ViewUnpublishedDataverse.label=ViewUnpublishedDataverse
+permission.ViewUnpublishedDataset.label=ViewUnpublishedDataset
+permission.DownloadFile.label=DownloadFile
+permission.EditDataverse.label=EditDataverse
+permission.EditDataset.label=EditDataset
+permission.ManageDataversePermissions.label=ManageDataversePermissions
+permission.ManageDatasetPermissions.label=ManageDatasetPermissions
+permission.PublishDataverse.label=PublishDataverse
+permission.PublishDataset.label=PublishDataset
+permission.DeleteDataverse.label=DeleteDataverse
+permission.DeleteDatasetDraft.label=DeleteDatasetDraft
+
+permission.AddDataverse.desc=Add a dataverse within another dataverse
+permission.DeleteDatasetDraft.desc=Delete a dataset draft
+permission.DeleteDataverse.desc=Delete an unpublished dataverse
+permission.PublishDataset.desc=Publish a dataset
+permission.PublishDataverse.desc=Publish a dataverse
+permission.ManageDatasetPermissions.desc=Manage permissions for a dataset
+permission.ManageDataversePermissions.desc=Manage permissions for a dataverse
+permission.EditDataset.desc=Edit a dataset's metadata
+permission.EditDataverse.desc=Edit a dataverse's metadata, facets, customization, and templates
+permission.DownloadFile.desc=Download a file
+permission.ViewUnpublishedDataset.desc=View an unpublished dataset and its files
+permission.ViewUnpublishedDataverse.desc=View an unpublished dataverse
+permission.AddDataset.desc=Add a dataset to a dataverse
+
+packageDownload.title=Package File Download
+packageDownload.instructions=Use the Download URL in a Wget command or a download manager to download this package file. Download via web browser is not recommended. User Guide - Downloading a Dataverse Package via URL
+packageDownload.urlHeader=Download URL
+
+#mydata_fragment.xhtml
+Published=Published
+Unpublished=Unpublished
+Draft=Draft
+In\u0020Review=In Review
+Deaccessioned=Deaccessioned
+
+#Managegroupspage.java
+dataverse.manageGroups.user=user
+dataverse.manageGroups.users=users
+dataverse.manageGroups.group=group
+dataverse.manageGroups.groups=groups
+dataverse.manageGroups.nomembers=No Members
+dataverse.manageGroups.unknown=unknown
+dataverse.manageGroups.User=User
+dataverse.manageGroups.Group=Group
+
+#editFilesFragment.xhtml
+editfilesfragment.mainlabel=Select Language Encoding...
+editfilesfragment.label1=West European
+editfilesfragment.label1.item1=Western (ISO-8859-1)
+editfilesfragment.label1.item2=Western (ISO-8859-15)
+editfilesfragment.label1.item3=Western (Windows-1252)
+editfilesfragment.label1.item4=Western (MacRoman)
+editfilesfragment.label1.item5=Western (IBM-850)
+editfilesfragment.label1.item6=Celtic (ISO-8859-14)
+editfilesfragment.label1.item7=Greek (ISO-8859-7)
+editfilesfragment.label1.item8=Greek (Windows-1253)
+editfilesfragment.label1.item9=Greek (MacGreek)
+editfilesfragment.label1.item10=Icelandic (MacIcelandic)
+editfilesfragment.label1.item11=Nordic (ISO-8859-10)
+editfilesfragment.label1.item12=South European (ISO-8859-3)
+editfilesfragment.label2=East European
+editfilesfragment.label2.item1=Baltic (ISO-8859-4)
+editfilesfragment.label2.item2=Baltic (ISO-8859-13)
+editfilesfragment.label2.item3=Baltic (Windows-1257)
+editfilesfragment.label2.item4=Cyrillic (ISO-8859-5)
+editfilesfragment.label2.item5=Cyrillic (ISO-IR-111)
+editfilesfragment.label2.item6=Cyrillic (Windows-1251)
+editfilesfragment.label2.item7=Cyrillic (MacCyrillic)
+editfilesfragment.label2.item8=Cyrillic/Ukrainian (MacUkrainian)
+editfilesfragment.label2.item9=Cyrillic (KOI8-R)
+editfilesfragment.label2.item10=Cyrillic/Ukrainian (KOI8-U)
+editfilesfragment.label2.item11=Croatian (MacCroatian)
+editfilesfragment.label2.item12=Romanian (MacRomanian)
+editfilesfragment.label2.item13=Romanian (ISO-8859-16)
+editfilesfragment.label2.item14=Central European (ISO-8859-2)
+editfilesfragment.label2.item15=Central European (Windows-1250)
+editfilesfragment.label2.item16=Central European (MacCE)
+editfilesfragment.label2.item17=Cyrillic (IBM-855)
+editfilesfragment.label3=East Asian
+editfilesfragment.label3.item1=Japanese (ISO-2022-JP)
+editfilesfragment.label3.item2=Japanese (Shift_JIS)
+editfilesfragment.label3.item3=Japanese (EUC-JP)
+editfilesfragment.label3.item4=Chinese Traditional (Big5)
+editfilesfragment.label3.item5=Chinese Traditional (Big5-HKSCS)
+editfilesfragment.label3.item6=Chinese Traditional (EUC-TW)
+editfilesfragment.label3.item7=Chinese Simplified (GB2312)
+editfilesfragment.label3.item8=Chinese Simplified (HZ)
+editfilesfragment.label3.item9=Chinese Simplified (GBK)
+editfilesfragment.label3.item10=Chinese Simplified (ISO-2022-CN)
+editfilesfragment.label3.item11=Korean (EUC-KR)
+editfilesfragment.label3.item12=Korean (JOHAB)
+editfilesfragment.label3.item13=Korean (ISO-2022-KR)
+editfilesfragment.label4=Unicode
+editfilesfragment.label4.item1=Unicode (UTF-8)
+editfilesfragment.label4.item2=Unicode (UTF-16LE)
+editfilesfragment.label4.item3=Unicode (UTF-16BE)
+editfilesfragment.label5=US-ASCII
+
+isrequired={0} is required.
+isrequired.conditional={0} is required if you choose to enter a value in any of the optional {1} fields.
+draftversion=DRAFT VERSION
+deaccessionedversion=DEACCESSIONED VERSION
+
+not_restricted=Not Restricted
+editdatafilepage.defaultLanguageEncoding=UTF-8 (default)
+passwdVal.passwdReq.each=each
+passwdVal.passwdReq.uppercase=uppercase
+passwdVal.passwdReq.lowercase=lowercase
+passwdVal.passwdReq.letter=letter
+passwdVal.passwdReq.numeral=numeral
+passwdVal.passwdReq.special=special
+dataretrieverAPI.noMsgResultsFound=Sorry, no results were found.
+
+#xlsxfilereader.java
+xlsxfilereader.ioexception.parse=Could not parse Excel/XLSX spreadsheet. {0}
+xlsxfilereader.ioexception.norows=No rows of data found in the Excel (XLSX) file.
+xlsxfilereader.ioexception.onlyonerow=Only one row of data (column name header?) detected in the Excel (XLSX) file.
+xlsxfilereader.ioexception.failed=Failed to read line {0} during the second pass.
+xlsxfilereader.ioexception.mismatch=Reading mismatch, line {0} during the second pass: {1} delimited values expected, {2} found.
+xlsxfilereader.ioexception.linecount=Mismatch between line counts in first and final passes!
+
+#rtabfileparser.java
+rtabfileparser.ioexception.failed=Failed to read line {0} of the Data file.
+rtabfileparser.ioexception.mismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found.
+rtabfileparser.ioexception.boolean=Unexpected value for the Boolean variable ({0}):
+rtabfileparser.ioexception.read=Couldn't read Boolean variable ({0})!
+rtabfileparser.ioexception.parser1=R Tab File Parser: Could not obtain varQnty from the dataset metadata.
+rtabfileparser.ioexception.parser2=R Tab File Parser: varQnty=0 in the dataset metadata!
+
+#ConfigureFragmentBean.java
+configurefragmentbean.apiTokenGenerated=An API Token will be generated. Please keep it secure, as you would a password.
+
+#FacetCategory - staticSearchFields
+staticSearchFields.dvCategory=Dataverse Category
+staticSearchFields.metadataSource=Metadata Source
+staticSearchFields.publicationDate=Publication Year
+staticSearchFields.fileTypeGroupFacet=File Type
+staticSearchFields.dvObjectType=Type
+staticSearchFields.fileTag=File Tag
+staticSearchFields.fileAccess=Access
+staticSearchFields.publicationStatus=Publication Status
+staticSearchFields.subject_ss=Subject
+
+#dataverse category - Facet Labels
+Researcher=Researcher
+Research\u0020Project=Research Project
+Journal=Journal
+Organization\u0020or\u0020Institution=Organization or Institution
+Teaching\u0020Course=Teaching Course
+Research\u0020Group=Research Group
+Laboratory=Laboratory
+Department=Department
+Uncategorized=Uncategorized
+
+#filetype - Facet Labels
+Document=Document
+Text=Text
+Tabular\u0020Data=Tabular Data
+Data=Data
+FITS=FITS
+Shape=Shape
+Image=Image
+Network\u0020Data=Network Data
+Unknown=Unknown
+Documentation=Documentation
+Code=Code
+Archive=Archive
+Audio=Audio
+Video=Video
+
+#access - Facet Labels
+Public=Public
+Restricted=Restricted
+
+
+#Shibboleth login
+idp.fatal.divMissing=<div> specified as "insertAtDiv" could not be located in the HTML
+idp.fatal.noXMLHttpRequest=Browser does not support XMLHttpRequest, unable to load IdP selection data
+idp.fatal.wrongProtocol=Policy supplied to DS was not "urn:oasis:names:tc:SAML:profiles:SSO:idpdiscovery-protocol:single"
+idp.fatal.wrongEntityId=entityId supplied by SP did not match configuration
+idp.fatal.noData=Metadata download returned no data
+idp.fatal.loadFailed=Failed to download metadata from
+idp.fatal.noparms=No parameters to discovery session and no defaultReturn parameter configured
+idp.fatal.noReturnURL=No URL return parameter provided
+idp.fatal.badProtocol=Return request must start with https:// or http://
+idp.idpPreferred.label=Use a previous selection:
+idp.idpEntry.label=Or enter your institution's name.
+idp.idpEntry.NoPreferred.label=Enter your institution's name and click "Continue" to log in via your institution's authentication system.
+idp.idpList.label=Or select your institution from the list below.
+idp.idpList.NoPreferred.label=Select your institution and click "Continue" to log in via your institution's authentication system.
+idp.idpList.defaultOptionLabel=Please select...
+idp.idpList.showList=Allow me to pick from a list
+idp.idpList.showSearch=Allow me to type the name of my institution
+idp.submitButton.label=Continue
+idp.helpText=Help
+idp.defaultLogoAlt=
+
+#externaltools
+externaltools.dct.displayname=Data Curation Tool
+externaltools.dct.description=Data Curation Tool for curation of variables
+externaltools.explorer.displayname=Data Explorer
+externaltools.explorer.description=The Data Explorer provides a GUI that lists the variables in a tabular data file, allowing searching, charting, and cross-tabulation analysis.
+
+# api/admin/datasetfield/load
+api.admin.datasetfield.load.ArrayIndexOutOfBoundMessage=Error parsing metadata block in {0} part, line #{1}: missing ''{2}'' column (#{3})
+api.admin.datasetfield.load.GeneralErrorMessage=Error parsing metadata block in {0} part, line #{1}: {2}
+
+#PIDs
+pids.api.reservePid.success=PID reserved for {0}
+pids.api.deletePid.success=PID deleted for {0}
+pids.deletePid.failureExpected=Unable to delete PID {0}. Status code: {1}.
+pids.deletePid.failureOther=Problem deleting PID {0}: {1}
+pids.commands.reservePid.failure=Problem reserving PID for dataset id {0}: {1}.
+pids.datacite.errors.noResponseCode=Problem getting HTTP status code from {0}. Is it in DNS? Is doi.dataciterestapiurlstring configured properly?
+pids.datacite.errors.DoiOnly=Only doi: is supported.
+
+#PublishDatasetCommand
+publishDatasetCommand.pidNotReserved=Cannot publish dataset because its persistent identifier has not been reserved.
+
+# APIs
+api.errors.invalidApiToken=Invalid API token.
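
Editor's note: api.errors.invalidApiToken is the generic response when the X-Dataverse-key header carries a bad token. For orientation only (placeholder values, not part of this patch), a quick way to check that a token is accepted:

```bash
#!/bin/bash
# Hypothetical sketch: verifying an API token by asking the API who it belongs to.
# SERVER_URL and API_TOKEN are placeholders.
SERVER_URL=https://demo.dataverse.org
API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx

# A valid token returns the account details; an invalid one yields the
# api.errors.invalidApiToken message defined above.
curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/users/:me"
```
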
diff --git a/distros/dataverse.no/modification/analytics.xhtml b/distros/dataverse.no/modification/analytics.xhtml
new file mode 100644
index 0000000..a42753d
--- /dev/null
+++ b/distros/dataverse.no/modification/analytics.xhtml
@@ -0,0 +1,23 @@
+
+
+
+
+
+
diff --git a/distros/dataverse.no/modification/dataverse_footer.xhtml b/distros/dataverse.no/modification/dataverse_footer.xhtml
new file mode 100644
index 0000000..01be3ed
--- /dev/null
+++ b/distros/dataverse.no/modification/dataverse_footer.xhtml
@@ -0,0 +1,92 @@
+
+
+
+
+
+
From 528089e0732dcabbf8a35b46c528ec5e2c7eb950 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 12 Jul 2022 09:17:16 +0000
Subject: [PATCH 015/354] analytics initialisation
---
distros/dataverse.no/init.d/100-analytics.sh | 1 +
1 file changed, 1 insertion(+)
create mode 100644 distros/dataverse.no/init.d/100-analytics.sh
diff --git a/distros/dataverse.no/init.d/100-analytics.sh b/distros/dataverse.no/init.d/100-analytics.sh
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/distros/dataverse.no/init.d/100-analytics.sh
@@ -0,0 +1 @@
+
From f197f8b2433c2420ea02e673f3d6359af8e3619a Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 12 Jul 2022 09:30:05 +0000
Subject: [PATCH 016/354] analytics initialization update
---
distros/dataverse.no/init.d/100-analytics.sh | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/100-analytics.sh b/distros/dataverse.no/init.d/100-analytics.sh
index 8b13789..cb1c4ba 100644
--- a/distros/dataverse.no/init.d/100-analytics.sh
+++ b/distros/dataverse.no/init.d/100-analytics.sh
@@ -1 +1,2 @@
-
+curl -o $DOCROOT_DIR/analytics.xhtml https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/analytics.xhtml
+curl -X PUT -d $DOCROOT_DIR/analytics.xhtml http://localhost:8080/api/admin/settings/:WebAnalyticsCode
From e88d9a08ed79aa4434e4d303fdf410c6f86795c8 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 12 Jul 2022 09:47:04 +0000
Subject: [PATCH 017/354] analytics init update
---
distros/dataverse.no/init.d/100-analytics.sh | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/100-analytics.sh b/distros/dataverse.no/init.d/100-analytics.sh
index cb1c4ba..3c9c536 100644
--- a/distros/dataverse.no/init.d/100-analytics.sh
+++ b/distros/dataverse.no/init.d/100-analytics.sh
@@ -1,2 +1,6 @@
curl -o $DOCROOT_DIR/analytics.xhtml https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/analytics.xhtml
-curl -X PUT -d $DOCROOT_DIR/analytics.xhtml http://localhost:8080/api/admin/settings/:WebAnalyticsCode
+
+if [ ! -z "$WEBANALYTICSON" ]
+then
+ curl -X PUT -d $DOCROOT_DIR/analytics.xhtml http://localhost:8080/api/admin/settings/:WebAnalyticsCode
+fi
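
Editor's note: after this init script runs, one might want to confirm that the setting took effect. A hedged sketch (not part of the patch) using the Admin API settings listing, assuming it is reachable from localhost as in the script above:

```bash
#!/bin/bash
# Hypothetical check: list the database settings and confirm that
# :WebAnalyticsCode now points at the downloaded analytics.xhtml.
curl -s http://localhost:8080/api/admin/settings | grep WebAnalyticsCode
```
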
From 816d157ec799d2591ae854635e35af4de57d29b5 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 12 Jul 2022 10:43:40 +0000
Subject: [PATCH 018/354] Bundle.property update
---
.../modification/Bundle.properties | 24 ++-----------------
1 file changed, 2 insertions(+), 22 deletions(-)
diff --git a/distros/dataverse.no/modification/Bundle.properties b/distros/dataverse.no/modification/Bundle.properties
index 6266510..93ddde7 100644
--- a/distros/dataverse.no/modification/Bundle.properties
+++ b/distros/dataverse.no/modification/Bundle.properties
@@ -188,10 +188,7 @@ wasReturnedByReviewer=, was returned by the curator of
# TODO: Confirm that "toReview" can be deleted.
toReview=Don't forget to publish it or send it back to the contributor!
# Bundle file editors, please note that "notification.welcome" is used in a unit test.
-#notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address.
-#UB
notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check our user guides at http://site.uit.no/dataverseno/deposit/ or contact DataverseNO support for assistance: http://site.uit.no/dataverseno/support/.
-
notification.demoSite=Demo Site
notification.requestFileAccess=File access requested for dataset: {0} was made by {1} ({2}).
notification.grantFileAccess=Access granted for files in dataset: {0}.
@@ -306,16 +303,8 @@ login.System=Login System
login.forgot.text=Forgot your password?
login.builtin=Dataverse Account
login.institution=Institutional Account
-#login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
-#UB
login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
-
-
-#login.institution.support.blurbwithLink=Leaving your institution? Please contact {0} for assistance.
-#UB
login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance.
-
-
login.builtin.credential.usernameOrEmail=Username/Email
login.builtin.credential.password=Password
login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account?
@@ -715,10 +704,7 @@ notification.email.wasSubmittedForReview={0} (view at {1}) was submitted for rev
notification.email.wasReturnedByReviewer={0} (view at {1}) was returned by the curator of {2} (view at {3}).
notification.email.wasPublished={0} (view at {1}) was published in {2} (view at {3}).
notification.email.publishFailedPidReg={0} (view at {1}) in {2} (view at {3}) could not be published due to a failure to register, or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen.
-#notification.email.closing=\n\nYou may contact us for support at {0}.\n\nThank you,\n{1}
-#UB
notification.email.closing=\n\nYou may contact us for support at https://site.uit.no/dataverseno/support/ .\n\nThank you,\n{1}
-
notification.email.closing.html=<br/><br/>You may contact us for support at {0}.<br/><br/>Thank you,<br/>{1}
notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}).
notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}).
@@ -890,10 +876,7 @@ dataverse.results.btn.addData.newDataverse=New Dataverse
dataverse.results.btn.addData.newDataset=New Dataset
dataverse.results.dialog.addDataGuest.header=Add Data
dataverse.results.dialog.addDataGuest.msg=Log in to create a dataverse or add a dataset.
-#dataverse.results.dialog.addDataGuest.msg.signup=Sign up or log in to create a dataverse or add a dataset.
-#UB:
dataverse.results.dialog.addDataGuest.msg.signup=You need to Log In to add a dataset.
-
dataverse.results.dialog.addDataGuest.signup.title=Sign Up for a Dataverse Account
dataverse.results.dialog.addDataGuest.login.title=Log into your Dataverse Account
dataverse.results.types.dataverses=Dataverses
@@ -1674,11 +1657,8 @@ file.download.filetype.unknown=Original File Format
file.more.information.link=Link to more file information for
file.requestAccess=Request Access
file.requestAccess.dialog.msg=You need to Log In to request access.
-#file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
-#UB
-fietAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
-
-.accessRequested=Access Requested
+file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
+file.accessRequested=Access Requested
file.ingestInProgress=Ingest in progress...
file.dataFilesTab.metadata.header=Metadata
file.dataFilesTab.metadata.addBtn=Add + Edit Metadata
From 8542da5eb58827b44f5a089dc3165b8b1785115b Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 12 Jul 2022 10:52:24 +0000
Subject: [PATCH 019/354] Bundle.properties.patch
---
.../modification/Bundle.properties.patch | 49 +++++++++++++++++++
1 file changed, 49 insertions(+)
create mode 100644 distros/dataverse.no/modification/Bundle.properties.patch
diff --git a/distros/dataverse.no/modification/Bundle.properties.patch b/distros/dataverse.no/modification/Bundle.properties.patch
new file mode 100644
index 0000000..1a04e63
--- /dev/null
+++ b/distros/dataverse.no/modification/Bundle.properties.patch
@@ -0,0 +1,49 @@
+--- Bundle.properties 2021-08-04 19:13:08.000000000 +0000
++++ /root/git/dataverse-docker/distros/dataverse.no/modification/Bundle.properties 2022-07-12 10:41:34.201813777 +0000
+@@ -188,7 +188,7 @@
+ # TODO: Confirm that "toReview" can be deleted.
+ toReview=Don't forget to publish it or send it back to the contributor!
+ # Bundle file editors, please note that "notification.welcome" is used in a unit test.
+-notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address.
++notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check our user guides at http://site.uit.no/dataverseno/deposit/ or contact DataverseNO support for assistance: http://site.uit.no/dataverseno/support/.
+ notification.demoSite=Demo Site
+ notification.requestFileAccess=File access requested for dataset: {0} was made by {1} ({2}).
+ notification.grantFileAccess=Access granted for files in dataset: {0}.
+@@ -303,8 +303,8 @@
+ login.forgot.text=Forgot your password?
+ login.builtin=Dataverse Account
+ login.institution=Institutional Account
+-login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
+-login.institution.support.blurbwithLink=Leaving your institution? Please contact {0} for assistance.
++login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
++login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance.
+ login.builtin.credential.usernameOrEmail=Username/Email
+ login.builtin.credential.password=Password
+ login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account?
+@@ -704,7 +704,7 @@
+ notification.email.wasReturnedByReviewer={0} (view at {1}) was returned by the curator of {2} (view at {3}).
+ notification.email.wasPublished={0} (view at {1}) was published in {2} (view at {3}).
+ notification.email.publishFailedPidReg={0} (view at {1}) in {2} (view at {3}) could not be published due to a failure to register, or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen.
+-notification.email.closing=\n\nYou may contact us for support at {0}.\n\nThank you,\n{1}
++notification.email.closing=\n\nYou may contact us for support at https://site.uit.no/dataverseno/support/ .\n\nThank you,\n{1}
+ notification.email.closing.html=
You may contact us for support at {0}.
Thank you, {1}
+ notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}).
+ notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}).
+@@ -876,7 +876,7 @@
+ dataverse.results.btn.addData.newDataset=New Dataset
+ dataverse.results.dialog.addDataGuest.header=Add Data
+ dataverse.results.dialog.addDataGuest.msg=Log in to create a dataverse or add a dataset.
+-dataverse.results.dialog.addDataGuest.msg.signup=Sign up or log in to create a dataverse or add a dataset.
++dataverse.results.dialog.addDataGuest.msg.signup=You need to Log In to add a dataset.
+ dataverse.results.dialog.addDataGuest.signup.title=Sign Up for a Dataverse Account
+ dataverse.results.dialog.addDataGuest.login.title=Log into your Dataverse Account
+ dataverse.results.types.dataverses=Dataverses
+@@ -1657,7 +1657,7 @@
+ file.more.information.link=Link to more file information for
+ file.requestAccess=Request Access
+ file.requestAccess.dialog.msg=You need to Log In to request access.
+-file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
++file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
+ file.accessRequested=Access Requested
+ file.ingestInProgress=Ingest in progress...
+ file.dataFilesTab.metadata.header=Metadata
From 15e7dd30763e9d91f860b86e75914c09f1a26b41 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 12 Jul 2022 11:04:22 +0000
Subject: [PATCH 020/354] patch uit customisation
---
distros/dataverse.no/init.d/0001-bundle.sh | 5 +++++
1 file changed, 5 insertions(+)
create mode 100644 distros/dataverse.no/init.d/0001-bundle.sh
diff --git a/distros/dataverse.no/init.d/0001-bundle.sh b/distros/dataverse.no/init.d/0001-bundle.sh
new file mode 100644
index 0000000..77eba03
--- /dev/null
+++ b/distros/dataverse.no/init.d/0001-bundle.sh
@@ -0,0 +1,5 @@
+apt-get install patch -y
+curl -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch
+patch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/propertyFiles/Bundle.properties $DOCROOT_DIR/Bundle.properties.patch
+
+
From fdb9fbfbf137c4c40c820fca4f2b104fb373cf37 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 14 Jul 2022 09:41:53 +0000
Subject: [PATCH 021/354] updated affiliation script, requires python3.7 or newer
---
.../dataverse.no/init.d/affiliations/affiliation2data.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/affiliations/affiliation2data.py b/distros/dataverse.no/init.d/affiliations/affiliation2data.py
index 232e499..c889325 100644
--- a/distros/dataverse.no/init.d/affiliations/affiliation2data.py
+++ b/distros/dataverse.no/init.d/affiliations/affiliation2data.py
@@ -1,4 +1,5 @@
import pandas as pd
+import json
# id | dvno_affiliation | dvno_group_name | dvno_email_level
#-----+-------------------------------------+------------------+------------------
@@ -20,7 +21,10 @@ def reload_affiliations(loc):
#print(str(affiliations.iloc[[i]]['dvno_affiliation'].values[0]))
dvno_email_level = len(str(affiliations.iloc[[i]]['dvno_group_name']).split('.'))
#print(subdomains)
- sql = "insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('%s', '%s', '%s', '%s');" % (affiliations.iloc[[i]]['id'].values[0], affiliations.iloc[[i]]['dvno_affiliation'].values[0], affiliations.iloc[[i]]['dvno_group_name'].values[0], dvno_email_level)
+ affiliation = affiliations.iloc[[i]]['dvno_affiliation'].values[0]
+ affiliation = str(affiliation).replace("'", "\'\'")
+ #affiliation = "q['%s']" % affiliation
+ sql = "insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('%s', '%s', '%s', '%s');" % (affiliations.iloc[[i]]['id'].values[0], affiliation, affiliations.iloc[[i]]['dvno_group_name'].values[0], dvno_email_level)
print(sql)
return
From 0fef3ccea307680734b7c7d55a261293d053b230 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 14 Jul 2022 09:53:10 +0000
Subject: [PATCH 022/354] init.d script for affiliation trigger
---
distros/dataverse.no/init.d/202-trigger.sh | 5 +++++
1 file changed, 5 insertions(+)
create mode 100755 distros/dataverse.no/init.d/202-trigger.sh
diff --git a/distros/dataverse.no/init.d/202-trigger.sh b/distros/dataverse.no/init.d/202-trigger.sh
new file mode 100755
index 0000000..cb45abe
--- /dev/null
+++ b/distros/dataverse.no/init.d/202-trigger.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+python3 ./affiliations/affiliation2data.py > /tmp/affiliations.sql
+export PGPASSWORD=`cat /secrets/db/password`
+psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/extratrigger.sql
+psql -U dataverse dataverse -h postgres -f /tmp/affiliations.sql
From 3454b89066890689f5cb31dc657bc5b143974f44 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 20 Jul 2022 09:44:17 +0000
Subject: [PATCH 023/354] Added affiliation curl for Feide field
---
distros/dataverse.no/init.d/023-afilliation.sh | 3 +++
1 file changed, 3 insertions(+)
create mode 100644 distros/dataverse.no/init.d/023-afilliation.sh
diff --git a/distros/dataverse.no/init.d/023-afilliation.sh b/distros/dataverse.no/init.d/023-afilliation.sh
new file mode 100644
index 0000000..1510d7a
--- /dev/null
+++ b/distros/dataverse.no/init.d/023-afilliation.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+curl -X PUT -d "affiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationAttribute
+
From aa58d547f36899c8e68fe423e78ee1d8df2b1ac5 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 20 Jul 2022 09:52:04 +0000
Subject: [PATCH 024/354] updatetrigger.sql
---
.../init.d/affiliations/updatetrigger.sql | 22 +++++++++++++++++++
.../init.d/affiliations/updatetrigger.sql.2 | 2 ++
2 files changed, 24 insertions(+)
create mode 100644 distros/dataverse.no/init.d/affiliations/updatetrigger.sql
create mode 100644 distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2
diff --git a/distros/dataverse.no/init.d/affiliations/updatetrigger.sql b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql
new file mode 100644
index 0000000..95e6530
--- /dev/null
+++ b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql
@@ -0,0 +1,22 @@
+CREATE TABLE IF NOT EXISTS public.dvnoaffiliations (
+ id bigint,
+ dvno_affiliation character varying(255) DEFAULT NULL::character varying,
+ dvno_group_name character varying(255) DEFAULT NULL::character varying,
+ dvno_email_level integer DEFAULT 2
+);
+
+ALTER TABLE public.dvnoaffiliations OWNER TO dataverse;
+
+CREATE OR REPLACE FUNCTION public.affiliationupdate() RETURNS trigger
+ LANGUAGE plpgsql
+ AS $$
+
+BEGIN
+IF NEW.actionsubtype='login' THEN
+ update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1);
+ update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1);
+END IF
+RETURN NULL;
+END;
+$$
+CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.actionlogrecord FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate();
diff --git a/distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2 b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2
new file mode 100644
index 0000000..c1307ec
--- /dev/null
+++ b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2
@@ -0,0 +1,2 @@
+DROP TRIGGER affiliation_trigger_actionlog on public.actionlogrecord;
+CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.actionlogrecord FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate();
From b3c9176edde2e99e54ee3219163f5c26e5431267 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 22 Jul 2022 08:04:25 +0000
Subject: [PATCH 025/354] new trigger solution
---
.../affiliations/builtinuser_trigger.sql | 20 +++++++++++++++++++
.../affiliations/builtinuser_trigger.sql.2 | 1 +
2 files changed, 21 insertions(+)
create mode 100644 distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql
create mode 100644 distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2
diff --git a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql
new file mode 100644
index 0000000..8c5ecb8
--- /dev/null
+++ b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql
@@ -0,0 +1,20 @@
+CREATE TABLE IF NOT EXISTS public.dvnoaffiliations (
+ id bigint,
+ dvno_affiliation character varying(255) DEFAULT NULL::character varying,
+ dvno_group_name character varying(255) DEFAULT NULL::character varying,
+ dvno_email_level integer DEFAULT 2
+);
+
+ALTER TABLE public.dvnoaffiliations OWNER TO dataverse;
+
+CREATE OR REPLACE FUNCTION public.affiliationupdate() RETURNS trigger
+ LANGUAGE plpgsql
+ AS $$
+
+BEGIN
+ update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1);
+ update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1);
+RETURN NULL;
+END;
+$$
+CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.builtinuser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate();
diff --git a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2 b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2
new file mode 100644
index 0000000..cb2835b
--- /dev/null
+++ b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2
@@ -0,0 +1 @@
+CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.builtinuser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate();
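
The two files above define the PL/pgSQL function and the trigger on public.builtinuser separately. Below is a minimal sketch for verifying that the trigger was installed and that affiliations are being populated; it assumes the same postgres container, dataverse database/role and /secrets/db/password layout used by the other init.d scripts in this distro, which are assumptions rather than part of the patch.

    #!/bin/bash
    # Sketch only: check that the affiliation trigger exists and is filling in affiliations.
    export PGPASSWORD=$(cat /secrets/db/password)
    # List triggers defined on the builtinuser table
    psql -U dataverse dataverse -h postgres \
      -c "SELECT tgname FROM pg_trigger WHERE tgrelid = 'public.builtinuser'::regclass;"
    # Spot-check that affiliations are being filled in for authenticated users
    psql -U dataverse dataverse -h postgres \
      -c "SELECT useridentifier, affiliation FROM authenticateduser LIMIT 10;"
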
From 4136c07a36bf462ec0c9cb712447a7e917c1286e Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 22 Jul 2022 09:17:39 +0000
Subject: [PATCH 026/354] updated docker-compose.yaml
---
distros/dataverse.no/docker-compose.yaml | 17 +++++++++++------
1 file changed, 11 insertions(+), 6 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index a625eba..9afb711 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -47,15 +47,18 @@ services:
- "POSTGRES_PASSWORD"
- "POSTGRES_PORT"
volumes:
- - ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down
- # - /mntblob/database-data-prod:/var/lib/postgresql/data/
- # - /extdisk/database-data-demo:/var/lib/postgresql/data/
+ #- ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down
+ - ${POSTGRESTMP}/:/mnttmp/
+ - /extdisk/database-data-prod:/var/lib/postgresql/data/
+ # - /extdisk/database-data-demo:/var/lib/postgresql/data/
+
shibboleth:
networks:
- traefik
- image: ${DOCKER_HUB}/shibboleth:3.1.0
+ image: shibboleth:3.3.0
+ #image: ${DOCKER_HUB}/shibboleth:3.1.0
container_name: shibboleth
privileged: true
ports:
@@ -170,7 +173,8 @@ services:
dataverse:
networks:
- traefik
- image: coronawhy/dataverse:${VERSION}
+ image: ${DOCKER_HUB}/dataverse:${VERSION}
+ #image: coronawhy/dataverse:${VERSION}
container_name: dataverse
privileged: true
user:
@@ -250,13 +254,14 @@ services:
- "RSERVE_USER=rserve"
- "RSERVE_PASSWORD=rserve"
- "JVM_OPTS='-Xmx4g -Xms4g -XX:MaxPermSize=4g -XX:PermSize=4g'"
+ - "WEBANALYTICSON"
depends_on:
- postgres
- solr
volumes:
- ${CONFIGURATION_PATH}/secrets:/secrets
- ${LOCAL_STORAGE}/data:/data
- - ${DOCROOT}/docroot:/opt/docroot
+ - ${DOCROOT}/docroot:/opt/payara/docroot
- ./configs/domain.xml:/opt/payara/domain.xml
- ./init.d:/opt/payara/init.d
- /mnt:/mnt
From 78e9a8efca816f50716a43082fa3343ae095112f Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 22 Jul 2022 09:21:58 +0000
Subject: [PATCH 027/354] update docker compose
---
distros/dataverse.no/docker-compose.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 9afb711..f7eabae 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -57,8 +57,8 @@ services:
shibboleth:
networks:
- traefik
- image: shibboleth:3.3.0
- #image: ${DOCKER_HUB}/shibboleth:3.1.0
+ #image: shibboleth:3.3.0
+ image: ${DOCKER_HUB}/shibboleth:3.3.0
container_name: shibboleth
privileged: true
ports:
From 1367d15d49f9b55375be76c003e8910085a604a1 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 25 Jul 2022 09:17:05 +0200
Subject: [PATCH 028/354] Update docker-compose.yaml
fix postgres location
---
distros/dataverse.no/docker-compose.yaml | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 9afb711..f1552a7 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -47,10 +47,10 @@ services:
- "POSTGRES_PASSWORD"
- "POSTGRES_PORT"
volumes:
- #- ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down
+ - ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down
- ${POSTGRESTMP}/:/mnttmp/
- - /extdisk/database-data-prod:/var/lib/postgresql/data/
- # - /extdisk/database-data-demo:/var/lib/postgresql/data/
+ #- /extdisk/database-data-prod:/var/lib/postgresql/data/
+ #- /extdisk/database-data-demo:/var/lib/postgresql/data/
From 6743a2c9ff1005e8d685f5ad119c2eccd24ee68a Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Mon, 25 Jul 2022 07:35:08 +0000
Subject: [PATCH 029/354] fixed init.d
---
distros/dataverse.no/init.d/005-reindex.sh | 3 +++
distros/dataverse.no/init.d/100-analytics.sh | 0
distros/dataverse.no/init.d/201-bundle.sh | 7 +++++++
distros/dataverse.no/init.d/202-trigger.sh | 5 +++--
.../init.d/affiliations/.updatetrigger.sql.swp | Bin 0 -> 12288 bytes
.../init.d/affiliations/extratrigger.sql | 9 ---------
.../init.d/affiliations/extratrigger.sql.2 | 1 +
.../init.d/affiliations/extratrigger.sql.3 | 12 ++++++++++++
8 files changed, 26 insertions(+), 11 deletions(-)
create mode 100755 distros/dataverse.no/init.d/005-reindex.sh
mode change 100644 => 100755 distros/dataverse.no/init.d/100-analytics.sh
create mode 100755 distros/dataverse.no/init.d/201-bundle.sh
create mode 100644 distros/dataverse.no/init.d/affiliations/.updatetrigger.sql.swp
create mode 100644 distros/dataverse.no/init.d/affiliations/extratrigger.sql.2
create mode 100644 distros/dataverse.no/init.d/affiliations/extratrigger.sql.3
diff --git a/distros/dataverse.no/init.d/005-reindex.sh b/distros/dataverse.no/init.d/005-reindex.sh
new file mode 100755
index 0000000..465d5b0
--- /dev/null
+++ b/distros/dataverse.no/init.d/005-reindex.sh
@@ -0,0 +1,3 @@
+# Reindex all datasets
+curl http://localhost:8080/api/admin/index/clear
+curl http://localhost:8080/api/admin/index
diff --git a/distros/dataverse.no/init.d/100-analytics.sh b/distros/dataverse.no/init.d/100-analytics.sh
old mode 100644
new mode 100755
diff --git a/distros/dataverse.no/init.d/201-bundle.sh b/distros/dataverse.no/init.d/201-bundle.sh
new file mode 100755
index 0000000..8e30c1b
--- /dev/null
+++ b/distros/dataverse.no/init.d/201-bundle.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+/usr/bin/apt-get install patch -y
+/usr/bin/curl -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch
+/usr/bin/patch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/propertyFiles/Bundle.properties $DOCROOT_DIR/Bundle.properties.patch
+
+
diff --git a/distros/dataverse.no/init.d/202-trigger.sh b/distros/dataverse.no/init.d/202-trigger.sh
index cb45abe..0d371ea 100755
--- a/distros/dataverse.no/init.d/202-trigger.sh
+++ b/distros/dataverse.no/init.d/202-trigger.sh
@@ -1,5 +1,6 @@
#!/bin/bash
-python3 ./affiliations/affiliation2data.py > /tmp/affiliations.sql
+python3 ${INIT_SCRIPTS_FOLDER}/affiliations/affiliation2data.py > /tmp/affiliations.sql
export PGPASSWORD=`cat /secrets/db/password`
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/extratrigger.sql
+psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/builtinuser_trigger.sql
+psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/builtinuser_trigger.sql.2
psql -U dataverse dataverse -h postgres -f /tmp/affiliations.sql
diff --git a/distros/dataverse.no/init.d/affiliations/.updatetrigger.sql.swp b/distros/dataverse.no/init.d/affiliations/.updatetrigger.sql.swp
new file mode 100644
index 0000000000000000000000000000000000000000..4db26fc7fe8f54dfabee21e2c71f00d4a80c7fab
GIT binary patch
literal 12288
zcmeI2&2G~`5XZM1`4&hVVM;3{QQM>~Dpaj1;igWA6rAGNltZP+u{Y@o$2RLtN-ppY
zJP#6_xNzcaK;ppIag&fBK@Uh2jitXWZ)QK{H#;jup7#3l+Edsrt~0!DGNwvj+B=sS
zTe!!V3`3PP|L#&eR%>49zT~p#Im#Ju8S`Qg&h~qg3|Q(4RT^7m-AbN_l?>y_57@mV
z0-=gt$?0^2FB~PpATISI?8}uD-7d}+zw&1%p?4xc1m+3!RcCGcn*CsNBTeiDMRJ6d}qIZ`v5AS5pg_&peN)IAH1c(3;AOb{y
z2oM1xKm>>Y5g-CY-~tkGJjU+g_x^XNw)$7MxLd@0lviH0}5VofLfQ}4%khD%U#&CMVB~P1MT^aVHR^asbUfgeEX{-pk
zdERq`zY4kI=F0I={`eH{E8}}1R{$sQ;2h>6fPBn-?yA{If4#ub1c`^jOC0V9^mS?w
z$-De@)RH{%9hc{SHpF#5&fyK0nR-7Pp^@{WR`fv)Bfgiz2#X*G7(tw=J%m^}QYq0x
zN<4PK7d@e11B3Jrh2xI{kr+fTNqi#~RouxS@rl}v+wXzNs+bCPLsU)6HN3>0{unE~oz?fD~4ggEruaDDsF0mR%
zMq<-|aoIU$aJFkw>}h7Td_LelhD6~$mT;g~G`nuWX1e~_=(s12#LbF!14j;%?A+Yi
zDvUcNr^L?AoYCrJtf|b0IlFTVCOl{h%o4SFMs&oYhT&*fc-GaLR`cwDn
Date: Mon, 25 Jul 2022 07:46:38 +0000
Subject: [PATCH 030/354] removed unused script
---
distros/dataverse.no/init.d/0001-bundle.sh | 5 -----
.../dataverse.no/init.d/012-minio-bucket1.sh | 17 -----------------
distros/dataverse.no/init.d/105-reindex.sh | 3 ---
3 files changed, 25 deletions(-)
delete mode 100644 distros/dataverse.no/init.d/0001-bundle.sh
delete mode 100755 distros/dataverse.no/init.d/012-minio-bucket1.sh
delete mode 100755 distros/dataverse.no/init.d/105-reindex.sh
diff --git a/distros/dataverse.no/init.d/0001-bundle.sh b/distros/dataverse.no/init.d/0001-bundle.sh
deleted file mode 100644
index 77eba03..0000000
--- a/distros/dataverse.no/init.d/0001-bundle.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-apt-get install patch -y
-curl -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch
-patch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/propertyFiles/Bundle.properties $DOCROOT_DIR/Bundle.properties.patch
-
-
diff --git a/distros/dataverse.no/init.d/012-minio-bucket1.sh b/distros/dataverse.no/init.d/012-minio-bucket1.sh
deleted file mode 100755
index 6fe5e7b..0000000
--- a/distros/dataverse.no/init.d/012-minio-bucket1.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-
-# MinIO bucket activation
-# https://guides.dataverse.org/en/latest/installation/config.html#id87
-if [ "${minio_label_1}" ]; then
- asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.type\=s3"
- asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.label\=${minio_label_1}"
- asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.bucket-name\=${minio_bucket_1}"
- asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.download-redirect\=false"
-# asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.upload-redirect=true"
-# asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.ingestsizelimit=13107200"
- asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.url-expiration-minutes\=120"
- asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.connection-pool-size\=4096"
- asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.profile\=${minio_profile_1}"
- asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.custom-endpoint-url\=${minio_custom_endpoint}"
- asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.${bucketname_1}.path-style-access\=true"
-fi
diff --git a/distros/dataverse.no/init.d/105-reindex.sh b/distros/dataverse.no/init.d/105-reindex.sh
deleted file mode 100755
index 465d5b0..0000000
--- a/distros/dataverse.no/init.d/105-reindex.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-# Reindex all datasets
-curl http://localhost:8080/api/admin/index/clear
-curl http://localhost:8080/api/admin/index
From 59f35fbe936a3cf770669284a6fde354fde64651 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Mon, 25 Jul 2022 08:12:26 +0000
Subject: [PATCH 031/354] updated affiliations/builtinuser_trigger.sql
---
distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql | 1 -
1 file changed, 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql
index 8c5ecb8..6c8fd47 100644
--- a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql
+++ b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql
@@ -17,4 +17,3 @@ BEGIN
RETURN NULL;
END;
$$
-CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.builtinuser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate();
From cf1ee5f7dcfc6445fdb7839fb9223d6703df0ffb Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 27 Jul 2022 07:37:03 +0000
Subject: [PATCH 032/354] Curl comment for Feide affiliation option
---
distros/dataverse.no/init.d/023-afilliation.sh | 1 +
1 file changed, 1 insertion(+)
diff --git a/distros/dataverse.no/init.d/023-afilliation.sh b/distros/dataverse.no/init.d/023-afilliation.sh
index 1510d7a..51fcd94 100644
--- a/distros/dataverse.no/init.d/023-afilliation.sh
+++ b/distros/dataverse.no/init.d/023-afilliation.sh
@@ -1,3 +1,4 @@
#!/bin/bash
curl -X PUT -d "affiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationAttribute
+curl -X PUT -d True http://localhost:8080/api/admin/settings/ShibAffiliationFeide
From 7992e09bbc62c337872682e424fab302bfef0892 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 27 Jul 2022 09:13:20 +0000
Subject: [PATCH 033/354] fixed affiliation curl
---
distros/dataverse.no/init.d/023-afilliation.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/023-afilliation.sh b/distros/dataverse.no/init.d/023-afilliation.sh
index 51fcd94..8921267 100644
--- a/distros/dataverse.no/init.d/023-afilliation.sh
+++ b/distros/dataverse.no/init.d/023-afilliation.sh
@@ -1,4 +1,4 @@
#!/bin/bash
curl -X PUT -d "affiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationAttribute
-curl -X PUT -d True http://localhost:8080/api/admin/settings/ShibAffiliationFeide
+curl -X PUT -d True http://localhost:8080/api/admin/settings/:ShibAffiliationFeide
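
Both values above are written through the standard Dataverse admin settings API, so reading them back is a quick way to confirm the PUTs took effect. A sketch, run from inside the dataverse container; each GET should return the stored value (or a not-found response if the setting is unknown):

    #!/bin/bash
    # Sketch only: read back the Shibboleth-related settings set above.
    curl http://localhost:8080/api/admin/settings/:ShibAffiliationAttribute
    curl http://localhost:8080/api/admin/settings/:ShibAffiliationFeide
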
From a6c6115215e89a61e7a56151b0b26d04de64101c Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 5 Aug 2022 09:42:42 +0000
Subject: [PATCH 034/354] Added script to backup and delete old logs from
database table actionlogrecord
---
.../init.d/cronjob/actionlog/actionlogrecord.sh | 7 +++++++
.../init.d/cronjob/actionlog/actionlogrecord.sql | 2 ++
2 files changed, 9 insertions(+)
create mode 100644 distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh
create mode 100644 distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql
diff --git a/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh
new file mode 100644
index 0000000..1acad6e
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+export PGPASSWORD=`cat /secrets/db/password`
+pg_dump -U dataverse dataverse -h postgres -t actionlogrecord > /tmp/actionlogrecord$(date +'%Y%m%d').dump
+gzip --force /tmp/actionlogrecord$(date +'%Y%m%d').dump
+psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/actionlog/actionlogrecord.sql
+
+
diff --git a/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql
new file mode 100644
index 0000000..9a90101
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql
@@ -0,0 +1,2 @@
+DELETE FROM actionlogrecord WHERE starttime < current_timestamp - interval '90 days';
+
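
The shell script dumps the actionlogrecord table, compresses the dump, and then deletes records older than 90 days; the patch itself does not show how it is scheduled. A sketch of a monthly crontab entry, assuming cron runs inside the dataverse container and that the init scripts are mounted at /opt/payara/init.d as elsewhere in this repo (the schedule and log path are assumptions):

    # Sketch only: run the actionlog rotation at 02:00 on the first day of each month.
    0 2 1 * * /bin/bash /opt/payara/init.d/cronjob/actionlog/actionlogrecord.sh >> /tmp/actionlogrecord.log 2>&1
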
From 0c6f36354ddfee3b0af7fbcab2856cc320e5ceaf Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 5 Aug 2022 10:36:05 +0000
Subject: [PATCH 035/354] updated database log location
---
.../dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh
index 1acad6e..4cfd6fc 100644
--- a/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh
+++ b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh
@@ -1,7 +1,7 @@
#!/bin/bash
export PGPASSWORD=`cat /secrets/db/password`
-pg_dump -U dataverse dataverse -h postgres -t actionlogrecord > /tmp/actionlogrecord$(date +'%Y%m%d').dump
-gzip --force /tmp/actionlogrecord$(date +'%Y%m%d').dump
+pg_dump -U dataverse dataverse -h postgres -t actionlogrecord > ${DATA_DIR}/actionlogrecord$(date +'%Y%m%d').dump
+gzip --force ${DATA_DIR}/actionlogrecord$(date +'%Y%m%d').dump
psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/actionlog/actionlogrecord.sql
From 60e49227a2c7a68ef8c21a31771a2bd7e5c18b46 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Mon, 22 Aug 2022 07:35:37 +0000
Subject: [PATCH 036/354] added persistent index functionality
---
distros/dataverse.no/docker-compose.yaml | 2 +-
distros/dataverse.no/init.d/005-reindex.sh | 3 ---
distros/dataverse.no/init.d/023-afilliation.sh | 4 ++--
distros/dataverse.no/migration/create-backup-db.sh | 6 +++++-
4 files changed, 8 insertions(+), 7 deletions(-)
delete mode 100755 distros/dataverse.no/init.d/005-reindex.sh
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 6a67076..23d161f 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -92,7 +92,7 @@ services:
- "SOLR_JAVA_MEM=-Xms4g -Xmx4g"
- "SOLR_OPTS=-Dlog4j2.formatMsgNoLookups=true"
volumes:
- - solr-data:/opt/solr/server/solr/collection1/data
+ - solr-data:/var/solr/data
- ./configs/schema.xml:/var/solr/data/collection1/conf/schema.xml
labels:
- "traefik.enable=true"
diff --git a/distros/dataverse.no/init.d/005-reindex.sh b/distros/dataverse.no/init.d/005-reindex.sh
deleted file mode 100755
index 465d5b0..0000000
--- a/distros/dataverse.no/init.d/005-reindex.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-# Reindex all datasets
-curl http://localhost:8080/api/admin/index/clear
-curl http://localhost:8080/api/admin/index
diff --git a/distros/dataverse.no/init.d/023-afilliation.sh b/distros/dataverse.no/init.d/023-afilliation.sh
index 8921267..686b39a 100644
--- a/distros/dataverse.no/init.d/023-afilliation.sh
+++ b/distros/dataverse.no/init.d/023-afilliation.sh
@@ -1,4 +1,4 @@
#!/bin/bash
curl -X PUT -d "affiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationAttribute
-curl -X PUT -d True http://localhost:8080/api/admin/settings/:ShibAffiliationFeide
-
+curl -X PUT -d "lastAffiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationOrder
+#curl -X PUT -d "firstAffiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationOrder
diff --git a/distros/dataverse.no/migration/create-backup-db.sh b/distros/dataverse.no/migration/create-backup-db.sh
index efe88b5..4646ce8 100644
--- a/distros/dataverse.no/migration/create-backup-db.sh
+++ b/distros/dataverse.no/migration/create-backup-db.sh
@@ -3,5 +3,9 @@ docker exec -it postgres bash -c "pg_dump -U dataverse dataverse > /var/lib/p
gzip -c /extdisk/database-data-demo/dataverse.dump > "/extdisk/database-data-demo/dataverse$(date +'%Y%m%d').dump.gz"
docker exec -it postgres bash -c "createdb -U dataverse dataverse-tmp"
docker exec -it postgres bash -c "psql -U dataverse dataverse-tmp -f /var/lib/postgresql/data/dataverse.dump"
-cp ./cleanup-database.sql /var/lib/postgresql/data/
+docker cp ./cleanup-database.sql postgres:/var/lib/postgresql/data/
docker exec -it postgres bash -c "psql -U dataverse dataverse-tmp -f /var/lib/postgresql/data/cleanup-database.sql"
+docker exec -it postgres bash -c "pg_dump -U dataverse dataverse-tmp > /var/lib/postgresql/data/dataverseCL.dump"
+docker exec -it postgres bash -c "dropdb -U dataverse dataverse-tmp"
+gzip -c /extdisk/database-data-demo/dataverseCL.dump > "/extdisk/database-data-demo/dataverseCL$(date +'%Y%m%d').dump.gz"
+
From 63743ca5b14cb08366ac4078791d40b512bfc8d0 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 23 Aug 2022 09:13:21 +0000
Subject: [PATCH 037/354] Added Make data count
---
.../init.d/203-counterprocessor.sh | 23 +++++++++++++++++++
1 file changed, 23 insertions(+)
create mode 100644 distros/dataverse.no/init.d/203-counterprocessor.sh
diff --git a/distros/dataverse.no/init.d/203-counterprocessor.sh b/distros/dataverse.no/init.d/203-counterprocessor.sh
new file mode 100644
index 0000000..1ca0075
--- /dev/null
+++ b/distros/dataverse.no/init.d/203-counterprocessor.sh
@@ -0,0 +1,23 @@
+
+#!/bin/bash
+
+mkdir /opt/payara/counter-processor
+cd /opt/payara/counter-processor
+wget https://github.com/CDLUC3/counter-processor/archive/v${COUNTERPROSVERSION}.tar.gz -O v${COUNTERPROSVERSION}.tar.gz
+tar xvfz v${COUNTERPROSVERSION}.tar.gz
+cd /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}
+curl "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country&license_key=${GEOIPLICENSE}&suffix=tar.gz" -o GeoLite2-Country.tar.gz \
+ && tar -xzvf GeoLite2-Country.tar.gz \
+ && mv GeoLite2-Country_*/GeoLite2-Country.mmdb /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/maxmind_geoip
+
+wget https://guides.dataverse.org/en/latest/_downloads/a65ffc2dba9f406858591558ae92790c/setup-counter-processor.sh -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/setup-counter-processor.sh
+wget https://guides.dataverse.org/en/latest/_downloads/fb16fe67897ad9fb85ec67bce5e6b83e/counter-processor-config.yaml -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/counter-processor-config.yaml
+
+curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs/mdc' http://localhost:8080/api/admin/settings/:MDCLogPath
+curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics
+pip3 install -r requirements.txt --ignore-installed PyYAML
+
+
+curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath
+curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics
+
From a87b69f92ebfe656417b18d025e49d3c55e1a495 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 23 Aug 2022 09:27:52 +0000
Subject: [PATCH 038/354] updated docker compose for make data count variable
---
distros/dataverse.no/docker-compose.yaml | 3 +++
1 file changed, 3 insertions(+)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 23d161f..bbe6658 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -255,6 +255,9 @@ services:
- "RSERVE_PASSWORD=rserve"
- "JVM_OPTS='-Xmx4g -Xms4g -XX:MaxPermSize=4g -XX:PermSize=4g'"
- "WEBANALYTICSON"
+ - "COUNTERPROSVERSION"
+ - "GEOIPLICENSE"
+ - "CONFIG_FILE"
depends_on:
- postgres
- solr
From 032ad3fcb26dcb82017dac921b0b06afb471c2ac Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 25 Aug 2022 15:20:42 +0000
Subject: [PATCH 039/354] updated makedatacount
---
distros/dataverse.no/init.d/203-counterprocessor.sh | 2 +-
distros/dataverse.no/init.d/cronjob/makedatacount.sh | 4 ++++
2 files changed, 5 insertions(+), 1 deletion(-)
create mode 100644 distros/dataverse.no/init.d/cronjob/makedatacount.sh
diff --git a/distros/dataverse.no/init.d/203-counterprocessor.sh b/distros/dataverse.no/init.d/203-counterprocessor.sh
index 1ca0075..099dbca 100644
--- a/distros/dataverse.no/init.d/203-counterprocessor.sh
+++ b/distros/dataverse.no/init.d/203-counterprocessor.sh
@@ -16,7 +16,7 @@ wget https://guides.dataverse.org/en/latest/_downloads/fb16fe67897ad9fb85ec67bce
curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs/mdc' http://localhost:8080/api/admin/settings/:MDCLogPath
curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics
pip3 install -r requirements.txt --ignore-installed PyYAML
-
+export ALLOWED_ENV=year_month
curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs' http://localhost:8080/api/admin/settings/:MDCLogPath
curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics
diff --git a/distros/dataverse.no/init.d/cronjob/makedatacount.sh b/distros/dataverse.no/init.d/cronjob/makedatacount.sh
new file mode 100644
index 0000000..ebf6c18
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/makedatacount.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+export YEAR_MONTH=$(date '+%Y-%m')
+cd /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}
+python3.8 main.py
From d98aab4cd937a88c156dfbbbb6fc99a05aac0df7 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 26 Aug 2022 07:55:41 +0000
Subject: [PATCH 040/354] added script to start cron
---
startup.sh | 5 +++++
1 file changed, 5 insertions(+)
create mode 100755 startup.sh
diff --git a/startup.sh b/startup.sh
new file mode 100755
index 0000000..5c8802e
--- /dev/null
+++ b/startup.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+docker-compose up -d
+sleep 10
+docker exec dataverse /etc/init.d/cron restart
+
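
startup.sh brings the stack up and restarts the cron daemon inside the dataverse container so that scheduled jobs such as the actionlog rotation can run. A small sketch for confirming cron is actually alive after startup; it assumes the container name used in this compose file and that pgrep is available in the image:

    #!/bin/bash
    # Sketch only: verify cron is running inside the dataverse container after startup.sh.
    docker exec dataverse pgrep -a cron || echo "cron is not running in the dataverse container"
    docker exec dataverse crontab -l 2>/dev/null   # list installed jobs, if any
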
From b2add71ebcc54022c686ea25604903c4bab77cba Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 26 Aug 2022 08:59:26 +0000
Subject: [PATCH 041/354] documentation about environment variables
---
doc/env.rst | 105 ++++++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 105 insertions(+)
create mode 100644 doc/env.rst
diff --git a/doc/env.rst b/doc/env.rst
new file mode 100644
index 0000000..01976dc
--- /dev/null
+++ b/doc/env.rst
@@ -0,0 +1,105 @@
+environment variables
+=====================
+
+main configuration
+------------------
+
+CONFIGURATION_PATH=/distrib/private
+DOCROOT=/distrib
+VERSION=5.9.1
+DOCKER_HUB=presacrd4oilmd5ss77y.azurecr.io/dataverseno
+SECRETS_DIR="${CONFIGURATION_PATH}/secrets"
+POSTGRESTMP=/mnt/tmp/postgres
+
+
+Dataverse database settings
+---------------------------
+
+DATAVERSE_DB_HOST=postgres
+DATAVERSE_DB_USER=dataverse
+DATAVERSE_DB_PASSWORD=password
+DATAVERSE_DB_NAME=dataverse
+
+solr
+----
+
+SOLR_SERVICE_HOST=solr:8983
+SOLR_SERVICE_PORT=8983
+DATAVERSE_URL=localhost:8080
+DATAVERSE_SERVICE_HOST=localhost
+LOCAL_STORAGE=/mntblob
+
+Counter Processor
+-----------------
+
+COUNTERPROSVERSION=0.1.04
+GEOIPLICENSE=licencekey
+CONFIG_FILE=counter-processor-config.yaml
+
+Postgres settings
+-----------------
+
+POSTGRES_USER=dataverse
+POSTGRES_PASSWORD=
+POSTGRES_SERVER=postgres
+POSTGRES_DATABASE=dataverse
+POSTGRES_DB=dataverse
+
+Domain configuration and init folder
+------------------------------------
+
+hostname=test-docker.dataverse.no
+traefikhost=test-docker.dataverse.no
+INIT_SCRIPTS_FOLDER=/opt/payara/init.d
+
+Webhook configuration to bundle external services
+-------------------------------------------------
+
+WEBHOOK=/opt/payara/triggers/external-services.py
+#CESSDA=True
+#CLARIN=True
+
+DOI parameters
+--------------
+
+# https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring
+doi_authority=10.21337
+doi_provider=DataCite
+doi_username=username
+doi_password=password
+dataciterestapiurlstring=https\:\/\/api.test.datacite.org
+baseurlstring=https\:\/\/mds.test.datacite.org
+
+AWS settings
+------------
+
+# https://guides.dataverse.org/en/latest/installation/config.html#id90
+aws_bucket_name=2002-green-dataversenotest1
+aws_s3_profile=cloudian
+aws_endpoint_url=https\:\/\/s3-oslo.educloud.no
+
+AWS UiT
+---------
+
+aws_uit_bucket_name=p-uit-dataverse01-sth
+aws_uit_s3_profile=uit
+#aws_endpoint_url=https\:\/\/s3-oslo.educloud.no
+
+Mail relay
+----------
+
+# https://guides.dataverse.org/en/latest/developers/troubleshooting.html
+system_email=
+mailhost=smtp-relay.exemple.com
+mailuser=no-reply@dataverse.no
+no_reply_email=no-reply@dataverse.no
+smtp_password=password
+smtp_port=465
+socket_port=465
+
+Federated authentication file
+-------------------------------
+
+# https://guides.dataverse.org/en/latest/installation/shibboleth.html
+federated_json_file=/secrets/openid.json
+
From ca9f73f444d540727fb7e19adadff55b01c848dc Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 26 Aug 2022 09:03:52 +0000
Subject: [PATCH 042/354] formatting updated
---
doc/env.rst | 41 +++++++++++++++++++++++++++++++++++++++++
1 file changed, 41 insertions(+)
diff --git a/doc/env.rst b/doc/env.rst
index 01976dc..d538fa9 100644
--- a/doc/env.rst
+++ b/doc/env.rst
@@ -5,10 +5,15 @@ main configuration
------------------
CONFIGURATION_PATH=/distrib/private
+
DOCROOT=/distrib
+
VERSION=5.9.1
+
DOCKER_HUB=presacrd4oilmd5ss77y.azurecr.io/dataverseno
+
SECRETS_DIR="${CONFIGURATION_PATH}/secrets"
+
POSTGRESTMP=/mnt/tmp/postgres
@@ -16,90 +21,126 @@ Dataverse database settings
---------------------------
DATAVERSE_DB_HOST=postgres
+
DATAVERSE_DB_USER=dataverse
+
DATAVERSE_DB_PASSWORD=password
+
DATAVERSE_DB_NAME=dataverse
solr
----
SOLR_SERVICE_HOST=solr:8983
+
SOLR_SERVICE_PORT=8983
+
DATAVERSE_URL=localhost:8080
+
DATAVERSE_SERVICE_HOST=localhost
+
LOCAL_STORAGE=/mntblob
Conter Processor
----------------
COUNTERPROSVERSION=0.1.04
+
GEOIPLICENSE=licencekey
+
CONFIG_FILE=counter-processor-config.yaml
Postgres settings
-----------------
POSTGRES_USER=dataverse
+
POSTGRES_PASSWORD=
+
POSTGRES_SERVER=postgres
+
POSTGRES_DATABASE=dataverse
+
POSTGRES_DB=dataverse
Domain configuration and init folder
------------------------------------
hostname=test-docker.dataverse.no
+
traefikhost=test-docker.dataverse.no
+
INIT_SCRIPTS_FOLDER=/opt/payara/init.d
Webhook configuration to bundle external services
-------------------------------------------------
WEBHOOK=/opt/payara/triggers/external-services.py
+
#CESSDA=True
+
#CLARIN=True
DOI parameters
--------------
# https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring
+
doi_authority=10.21337
+
doi_provider=DataCite
+
doi_username=username
+
doi_password=password
+
dataciterestapiurlstring=https\:\/\/api.test.datacite.org
+
baseurlstring=https\:\/\/mds.test.datacite.org
AWS settings
------------
# https://guides.dataverse.org/en/latest/installation/config.html#id90
+
aws_bucket_name=2002-green-dataversenotest1
+
aws_s3_profile=cloudian
+
aws_endpoint_url=https\:\/\/s3-oslo.educloud.no
AWS UiT
---------
aws_uit_bucket_name=p-uit-dataverse01-sth
+
aws_uit_s3_profile=uit
+
#aws_endpoint_url=https\:\/\/s3-oslo.educloud.no
Mail relay
---------
# https://guides.dataverse.org/en/latest/developers/troubleshooting.html
+
system_email=
+
mailhost=smtp-relay.exemple.com
+
mailuser=no-reply@dataverse.no
+
no_reply_email=no-reply@dataverse.no
+
smtp_password=password
+
smtp_port=465
+
socket_port=465
Federated authentification file
-------------------------------
# https://guides.dataverse.org/en/latest/installation/shibboleth.html
+
federated_json_file=/secrets/openid.json
From 03abf14439a87dbc8e0daa62898ea1695fae576d Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 26 Aug 2022 09:08:16 +0000
Subject: [PATCH 043/354] added comments
---
doc/env.rst | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/doc/env.rst b/doc/env.rst
index d538fa9..41678db 100644
--- a/doc/env.rst
+++ b/doc/env.rst
@@ -1,6 +1,14 @@
-environment variables
+Environment variables
=====================
+To run Dataverse as a fully operational production service, data providers should fill in all settings in the configuration file, which holds information about their domain name, DOI settings, the language of the web interface, mail relay, external controlled vocabularies and storage. It is also possible to integrate Docker-based custom services into the infrastructure and to create custom software packages serving the needs of specific data providers, for example to integrate a separate Shibboleth container for federated authentication, install a new data previewer or activate a data processing pipeline.
+
+Configuration
+~~~~~~~~~~~~~
+
+The configuration is managed in one central place, an environment variables file called .env, so administrators do not need to modify any other files in the software package. It contains all settings required to deploy Dataverse, for example to set the language of the web interface and to establish connections to the local database, the SOLR search engine, the mail relay or external storage.
+
+
main configuration
------------------
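
Since every deployment setting now lives in .env, a quick way to confirm that docker-compose actually picks the values up is to render the effective configuration. A sketch, assuming the docker-compose v1 CLI used by this distro and the default checkout location under /distrib:

    #!/bin/bash
    # Sketch only: confirm that .env values are substituted into the compose file.
    cd /distrib/dataverse-docker
    grep -E '^(hostname|traefikhost|VERSION|DOCKER_HUB)=' .env
    docker-compose config | grep -E 'image:|traefik'
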
From 6c853dd7902495c2beacef5539db234b2e98c883 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 26 Aug 2022 09:20:52 +0000
Subject: [PATCH 044/354] added installation instructions
---
doc/installation.rst | 23 +++++++++++++++++++++++
1 file changed, 23 insertions(+)
create mode 100644 doc/installation.rst
diff --git a/doc/installation.rst b/doc/installation.rst
new file mode 100644
index 0000000..a6329b0
--- /dev/null
+++ b/doc/installation.rst
@@ -0,0 +1,23 @@
+dataverse.no installation
+=========================
+
+Prerequisites : have sudo rights
+
+''
+sudo su
+sudo apt-get update
+sudo apt-get install \
+ ca-certificates \
+ curl \
+ gnupg \
+ lsb-release
+curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+
+echo \
+ "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
+ $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
+
+sudo apt-get update
+sudo apt-get install docker-ce docker-ce-cli containerd.io
+
+''
From ec1a998334ea7740b2cfe03b4c5369f072fe44ba Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 26 Aug 2022 09:26:45 +0000
Subject: [PATCH 045/354] updated code block
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index a6329b0..67ba13b 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -3,7 +3,7 @@ dataverse.no installation
Prerequisites : have sudo rights
-''
+``
sudo su
sudo apt-get update
sudo apt-get install \
@@ -20,4 +20,4 @@ echo \
sudo apt-get update
sudo apt-get install docker-ce docker-ce-cli containerd.io
-''
+``
From 075a5648ab5c358d02f8336e5279b54289f66344 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 26 Aug 2022 09:30:05 +0000
Subject: [PATCH 046/354] updated code block
---
doc/installation.rst | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 67ba13b..2987833 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -3,7 +3,7 @@ dataverse.no installation
Prerequisites : have sudo rights
-``
+..
sudo su
sudo apt-get update
sudo apt-get install \
@@ -19,5 +19,4 @@ echo \
sudo apt-get update
sudo apt-get install docker-ce docker-ce-cli containerd.io
-
-``
+..
From b84bf44ab7e6211ef7dca30e76decb794996b547 Mon Sep 17 00:00:00 2001
From: Vyacheslav Tykhonov <4tikhonov@users.noreply.github.com>
Date: Fri, 26 Aug 2022 11:31:04 +0200
Subject: [PATCH 047/354] Update installation.rst
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 2987833..b74178b 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -3,7 +3,7 @@ dataverse.no installation
Prerequisites : have sudo rights
-..
+``
sudo su
sudo apt-get update
sudo apt-get install \
@@ -19,4 +19,4 @@ echo \
sudo apt-get update
sudo apt-get install docker-ce docker-ce-cli containerd.io
-..
+``
From 1df43dcfd687375690ac032f2fabb2838003a3ef Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 26 Aug 2022 11:37:29 +0200
Subject: [PATCH 048/354] Update installation.rst
---
doc/installation.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index b74178b..1a74458 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -20,3 +20,4 @@ echo \
sudo apt-get update
sudo apt-get install docker-ce docker-ce-cli containerd.io
``
+``sudo su``
From bfa67cb354814e3db8f3c91bd36c6abfc50cb943 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 30 Aug 2022 11:13:35 +0200
Subject: [PATCH 049/354] Update installation.rst
---
doc/installation.rst | 57 ++++++++++++++++++++++++++++++--------------
1 file changed, 39 insertions(+), 18 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 1a74458..899bca6 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -2,22 +2,43 @@ dataverse.no installation
=========================
Prerequisites : have sudo rights
+instal Prerequisites, docker, docker-compose, and git
-``
-sudo su
-sudo apt-get update
-sudo apt-get install \
- ca-certificates \
- curl \
- gnupg \
- lsb-release
-curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
-
-echo \
- "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
- $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
-
-sudo apt-get update
-sudo apt-get install docker-ce docker-ce-cli containerd.io
-``
-``sudo su``
+.. code-block:: bash
+
+ sudo su
+ apt-get update
+ apt-get install \
+ ca-certificates \
+ curl \
+ gnupg \
+ lsb-release
+ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+
+ echo \
+ "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
+ $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
+
+ apt-get update
+ apt-get install docker-ce docker-ce-cli containerd.io
+ curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
+ chmod +x /usr/local/bin/docker-compose
+ apt-get install git
+ mkdir /distrib
+ cd /distrib
+
+
+Clone the git
+
+.. code-block:: bash
+
+ git clone https://github.com/IQSS/dataverse-docker
+ cd /distrib/dataverse-docker/
+ git branche dataverse.no
+ docker network create traefik
+ cp .env_sample .env
+
+the folowings needs to be changed in .env
+
+hostname=demo.dataverse.no
+traefikhost=demo.dataverse.no
From a0c032f127d6808dc2ee5c5675f971d66393a6bb Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 30 Aug 2022 11:14:27 +0200
Subject: [PATCH 050/354] Update installation.rst
---
doc/installation.rst | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 899bca6..cf1e523 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -40,5 +40,7 @@ Clone the git
the folowings needs to be changed in .env
+.. code-block:: bash
+
hostname=demo.dataverse.no
-traefikhost=demo.dataverse.no
+traefikhost=demo.dataverse.n
From ae06380c3029bf0f1b71b699c130e06e73704e88 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 30 Aug 2022 11:14:48 +0200
Subject: [PATCH 051/354] Update installation.rst
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index cf1e523..2b28c7c 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -42,5 +42,5 @@ the folowings needs to be changed in .env
.. code-block:: bash
-hostname=demo.dataverse.no
-traefikhost=demo.dataverse.n
+ hostname=demo.dataverse.no
+ traefikhost=demo.dataverse.n
From 902619cf6a83273467c8820ea8c3a44ed667f926 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 30 Aug 2022 11:15:49 +0200
Subject: [PATCH 052/354] Update .gitignore
---
.gitignore | 1 +
1 file changed, 1 insertion(+)
diff --git a/.gitignore b/.gitignore
index a8adb79..09c4d5d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,6 @@
dataverse.war
.env
+.gitignore
#Ignoring IDE files
.idea
From 9a7aa8120c2a4624702c88b007a778c9d92c8a33 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 30 Aug 2022 14:20:40 +0200
Subject: [PATCH 053/354] Update installation.rst
---
doc/installation.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index 2b28c7c..3a948b7 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -44,3 +44,10 @@ the folowings needs to be changed in .env
hostname=demo.dataverse.no
traefikhost=demo.dataverse.n
+
+
+
+Conjob to automaticaly restart dataverse
+----------------------------------------
+
+''*/3 * * * * /bin/bash /root/restart-dataverse.sh https://test-docker.dataverse.no''
From 46d301e7d09db6487487245a54b0e76de746cdbf Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 30 Aug 2022 14:21:13 +0200
Subject: [PATCH 054/354] Update installation.rst
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 3a948b7..7226cc9 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -50,4 +50,4 @@ the folowings needs to be changed in .env
Conjob to automaticaly restart dataverse
----------------------------------------
-''*/3 * * * * /bin/bash /root/restart-dataverse.sh https://test-docker.dataverse.no''
+``*/3 * * * * /bin/bash /root/restart-dataverse.sh https://test-docker.dataverse.no``
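
The entry above relies on a /root/restart-dataverse.sh helper that is not included in this patch set. A sketch for installing the entry into root's crontab without overwriting existing jobs; the helper script itself is assumed to already exist on the host:

    #!/bin/bash
    # Sketch only: append the restart job to root's crontab (assumes /root/restart-dataverse.sh exists).
    ( crontab -l 2>/dev/null; \
      echo '*/3 * * * * /bin/bash /root/restart-dataverse.sh https://test-docker.dataverse.no' ) | crontab -
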
From 7e7f5ff98693193ca915fd5ced9da405cff808e6 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Wed, 31 Aug 2022 10:27:04 +0200
Subject: [PATCH 055/354] Update installation.rst
---
doc/installation.rst | 115 ++++++++++++++++++++++++++++++++++++++-----
1 file changed, 104 insertions(+), 11 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 7226cc9..71ee0bd 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -2,7 +2,7 @@ dataverse.no installation
=========================
Prerequisites : have sudo rights
-instal Prerequisites, docker, docker-compose, and git
+install Prerequisites, docker, docker-compose, and git
.. code-block:: bash
@@ -24,30 +24,123 @@ instal Prerequisites, docker, docker-compose, and git
curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
apt-get install git
- mkdir /distrib
- cd /distrib
+
+Dataverse root folder
+---------------------
+
+defined in ``CONFIGURATION_PATH`` and ``DOCROOT`` default : ``/distrib/``
+
+.. code-block:: bash
+
+ export DISTRIB=/distrib
+ export CONFIGURATION_PATH=$DISTRIB/private
+ mkdir $DISTRIB
+ mkdir $CONFIGURATION_PATH
+ cd $DISTRIB
+
+
Clone the git
+-------------
.. code-block:: bash
git clone https://github.com/IQSS/dataverse-docker
- cd /distrib/dataverse-docker/
- git branche dataverse.no
- docker network create traefik
+ cd $DISTRIB/dataverse-docker/
+ git branch dataverse.no
+ cp $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH
cp .env_sample .env
+ docker network create traefik
+
+The following variables need to be changed in .env
+
+.. code-block:: bash
+
+ hostname=dataverse.no
+ traefikhost=dataverse.no
+
+main configuration
-the folowings needs to be changed in .env
+.. code-block:: bash
+
+ DISTRIB=/distrib
+ CONFIGURATION_PATH=/distrib/private
+
+Solr
+
+.. code-block:: bash
+
+ LOCAL_STORAGE=/mntblob
+
+Counter Processor
+
+.. code-block:: bash
+
+GEOIPLICENSE=licencekey
+
+Postgres settings
.. code-block:: bash
- hostname=demo.dataverse.no
- traefikhost=demo.dataverse.n
+ POSTGRES_PASSWORD=password
+
+
+DOI parameters
+.. code-block:: bash
+ doi_authority=10.21337
+ doi_username=username
+ doi_password=password
+
+Certificates installation
+-------------------------
-Conjob to automaticaly restart dataverse
-----------------------------------------
+Request the certificates from the correct authority
+
+dataverse.pem order:
+local in file $[hostmame].pem
+Intermediate in file sectigo-intermediate.pem
+Root in file sectigo-intermediate.pem
+TODO : split and cat command for automatisation
+
+
+
+certificates should be put in ´´$CONFIGURATION_PATH/configuration/files´´ there are 2 files a .pem file and a .key file
+
+The name of the certificates files should match the name in ´´$CONFIGURATION_PATH/configuration/files/certificate.toml´´
+
+Check the certificates with ´´curl -placeholder hostname ´´
+
+
+DOCROOT
+-------
+
+The appropriate docroot folder needs to be copied in ``$DISTRIB/docroot``
+for example ´´rsync -arzvP --rsh=ssh ./docroot [ServerName]:/distrib/docroot´´
+
+
+
+Apache and shibboleth configuration
+-----------------------------------
+Apache configuration
+
+Change domain name
+
+Set up shibboleth
+
+Copy keyen.sh comand
+
+
+
+
+
+Cronjob to automatically restart dataverse
+------------------------------------------
+
+NB: remember to stop it if you want it stopped :)
``*/3 * * * * /bin/bash /root/restart-dataverse.sh https://test-docker.dataverse.no``
+
+
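The cron entry above relies on a ``/root/restart-dataverse.sh`` helper that is not included in these patches; a minimal sketch of such a script, assuming it takes the site URL as its first argument and restarts the compose stack only when the site stops answering:

.. code-block:: bash

   #!/bin/bash
   # Hypothetical restart helper; the real /root/restart-dataverse.sh may differ.
   URL="${1:-https://test-docker.dataverse.no}"

   # Treat anything other than HTTP 200 as "down".
   STATUS=$(curl -s -o /dev/null -w '%{http_code}' --max-time 30 "$URL")

   if [ "$STATUS" != "200" ]; then
       cd /distrib/dataverse-docker || exit 1
       # Service name "dataverse" is an assumption of this sketch.
       /usr/local/bin/docker-compose restart dataverse
   fi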
From 70c08ef7e9f939ee222cd714b3c238eda65948f2 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Wed, 31 Aug 2022 10:56:40 +0200
Subject: [PATCH 056/354] Update installation.rst
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 71ee0bd..fbeeb11 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -46,7 +46,7 @@ Clone the git
.. code-block:: bash
- git clone https://github.com/IQSS/dataverse-docker
+ git clone https://github.com/DataverseNO/dataverse-docker.git
cd $DISTRIB/dataverse-docker/
git branch dataverse.no
cp $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH
From 90e816696aaedf941d7e8e4740bf2f70c7dc47f9 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Wed, 31 Aug 2022 11:07:22 +0200
Subject: [PATCH 057/354] Update installation.rst
---
doc/installation.rst | 6 ++----
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index fbeeb11..03c8918 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -48,9 +48,7 @@ Clone the git
git clone https://github.com/DataverseNO/dataverse-docker.git
cd $DISTRIB/dataverse-docker/
- git branch dataverse.no
- cp $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH
- cp .env_sample .env
+ git checkout dataverse.no
docker network create traefik
The following variables need to be changed in .env
@@ -103,7 +101,7 @@ dataverse.pem order:
local in file $[hostmame].pem
Intermediate in file sectigo-intermediate.pem
Root in file sectigo-intermediate.pem
-TODO : split and cat command for automatisation
+To make the certificate pem file´´cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem´´
From 95e084b1417fd8b7b26c914ecb0b16bf89cb9e26 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Wed, 31 Aug 2022 11:12:55 +0200
Subject: [PATCH 058/354] Update installation.rst
---
doc/installation.rst | 12 +++++++-----
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 03c8918..639eb89 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -49,6 +49,8 @@ Clone the git
git clone https://github.com/DataverseNO/dataverse-docker.git
cd $DISTRIB/dataverse-docker/
git checkout dataverse.no
+ cp -r $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH
+ cp .env_sample .env
docker network create traefik
The following variables need to be changed in .env
@@ -101,22 +103,22 @@ dataverse.pem order:
local in file $[hostmame].pem
Intermediate in file sectigo-intermediate.pem
Root in file sectigo-intermediate.pem
-To make the certificate pem file´´cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem´´
+To make the certificate pem file ``cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem``
-certificates should be put in ´´$CONFIGURATION_PATH/configuration/files´´ there are 2 files a .pem file and a .key file
+certificates should be put in ``$CONFIGURATION_PATH/configuration/files`` there are 2 files a .pem file and a .key file
-The name of the certificates files should match the name in ´´$CONFIGURATION_PATH/configuration/files/certificate.toml´´
+The name of the certificates files should match the name in ``$CONFIGURATION_PATH/configuration/files/certificate.toml``
-Check the certificates with ´´curl -placeholder hostname ´´
+Check the certificates with ``curl -placeholder hostname``
DOCROOT
-------
The appropriate docroot folder needs to be copied in ``$DISTRIB/docroot``
-for example ´´rsync -arzvP --rsh=ssh ./docroot [ServerName]:/distrib/docroot´´
+for example ``rsync -arzvP --rsh=ssh ./docroot [ServerName]:/distrib/docroot``
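The certificate ordering described above can be produced with plain ``cat``; a hedged example, assuming the host certificate arrived as ``dataverse.no.pem`` and the CA bundle as ``sectigo-ecc-intermediate.pem`` (file names are illustrative), followed by a concrete form of the elided ``curl`` check:

.. code-block:: bash

   # Append intermediate and root after the host certificate so the chain
   # is ordered local -> intermediate -> root, as described above.
   cd $CONFIGURATION_PATH/configuration/files
   cat sectigo-ecc-intermediate.pem >> dataverse.no.pem

   # Inspect the chain the running proxy actually presents.
   curl -vI https://dataverse.no 2>&1 | grep -iE 'issuer|subject|expire'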
From 95dec9b32c6a61dbc1141c562e96f4d1bcfcde5d Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 31 Aug 2022 09:34:02 +0000
Subject: [PATCH 059/354] updated .env_sample
---
.env_sample | 90 +++++++++++++++++++++++++----------------------------
1 file changed, 42 insertions(+), 48 deletions(-)
diff --git a/.env_sample b/.env_sample
index 599d8e0..392ab7d 100644
--- a/.env_sample
+++ b/.env_sample
@@ -1,41 +1,44 @@
LOCAL_WAR=./dataverse.war
-#COMPOSE_FILE=./docker-compose.yml
-
-# Activate Dataverse language pack by setting language code:
-# en - English hu - Hungarian fr - French sl - Slovenian
-# se - Swedish es - Spanish it - Italian ua - Ukrainian
-# pt - Portuguese ru - Russian at - Austrian German
-# br - Brazilian Portuguese ca - French Canadian
-#MAINLANG=en
+COMPOSE_FILE=distros/dataverse.no/docker-compose.yaml
+CONFIGURATION_PATH=/distrib/private
+DOCROOT=/distrib
+VERSION=5.11.12.7
+DOCKER_HUB=presacrd4oilmd5ss77y.azurecr.io/dataverseno
+SECRETS_DIR="${CONFIGURATION_PATH}/secrets"
+#WEBANALYTICSON=true
# Dataverse database settings
DATAVERSE_DB_HOST=postgres
DATAVERSE_DB_USER=dataverse
-DATAVERSE_DB_PASSWORD=dvnsecret
+DATAVERSE_DB_PASSWORD=psqlpassword
DATAVERSE_DB_NAME=dataverse
+#SOLR
SOLR_SERVICE_HOST=solr:8983
SOLR_SERVICE_PORT=8983
DATAVERSE_URL=localhost:8080
DATAVERSE_SERVICE_HOST=localhost
+LOCAL_STORAGE=/mntblob
+
+# Counter Processor
+COUNTERPROSVERSION=0.1.04
+GEOIPLICENSE=Licence
+CONFIG_FILE=counter-processor-config.yaml
# Postgres settings
POSTGRES_USER=dataverse
-POSTGRES_PASSWORD=dvnsecret
+POSTGRES_PASSWORD=psqlpassword
POSTGRES_SERVER=postgres
POSTGRES_DATABASE=dataverse
POSTGRES_DB=dataverse
+POSTGRESTMP=/mnt/tmp/postgres
+
# Domain configuration and init folder
-#hostname=www.yourdataverse.org
-hostname=locahost:8080
-#traefikhost=www.yourdataverse.org
-traefikhost=localhost:8080
+hostname=dataverse.no
+traefikhost=dataverse.no
INIT_SCRIPTS_FOLDER=/opt/payara/init.d
-# traefik email settings
-useremail=youremail@domain.com
-
# Webhook configuration to bundle external services
WEBHOOK=/opt/payara/triggers/external-services.py
#CESSDA=True
@@ -43,44 +46,35 @@ WEBHOOK=/opt/payara/triggers/external-services.py
# DOI parameters
# https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring
-#doi_authority=doi_authority
-#doi_provider=doi_provider
-#doi_shoulder=doi_shoulder
-#doi_username=doi_username
-#doi_password=doi_password
-dataciterestapiurlstring=https\\:\/\/api.test.datacite.org
+doi_authority=10.21337
+doi_provider=DataCite
+doi_username=doiusername
+doi_password=doipassword
+dataciterestapiurlstring=https\:\/\/api.test.datacite.org
baseurlstring=https\:\/\/mds.test.datacite.org
# AWS settings
# https://guides.dataverse.org/en/latest/installation/config.html#id90
-#aws_bucket_name=aws_bucket_name
-#aws_s3_profile=aws_s3_profile
-#aws_endpoint_url=aws_endpoint_url
+aws_bucket_name=awsbucketname
+aws_s3_profile=cloudian
+aws_endpoint_url=https\:\/\/s3-oslo.educloud.no
+
+# AWS UiT
+aws_uit_bucket_name=awsbucketname2
+aws_uit_s3_profile=uit
+#aws_endpoint_url=https\:\/\/s3-oslo.educloud.no
+
# Mail relay
# https://guides.dataverse.org/en/latest/developers/troubleshooting.html
-#system_email=system_email
-#mailhost=mailhost
-#mailuser=mailuser
-#no_reply_email=no_reply_email
-#smtp_password=smtp_password
-#smtp_port=smtp_port
-#socket_port=socket_port
+system_email=
+mailhost=smtp-relay.exemple.com
+mailuser=no-reply@dataverse.no
+no_reply_email=no-reply@dataverse.no
+smtp_password=smtppassword
+smtp_port=465
+socket_port=465
# Federated authentification file
# https://guides.dataverse.org/en/latest/installation/shibboleth.html
-#federated_json_file=federated_json_file
-
-# MinIO bucket 1
-# https://guides.dataverse.org/en/latest/installation/config.html#id87
-#bucketname_1=bucketname_1
-#minio_label_1=minio_label_1
-#minio_bucket_1=minio_bucket_1
-#minio_profile_1=minio_profile_1
-
-# MinIO bucket 2
-# https://guides.dataverse.org/en/latest/installation/config.html#id87
-#bucketname_2=bucketname_2
-#minio_label_1=minio_label_2
-#minio_bucket_1=minio_bucket_2
-#minio_profile_1=minio_profile_2
+federated_json_file=/secrets/openid.json
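A quick, hedged way to confirm that docker-compose actually picks the new values up from ``.env`` (this assumes the variables are forwarded in the compose file's ``environment`` section, as in this distro):

.. code-block:: bash

   cd /distrib/dataverse-docker
   # Renders the compose file with .env substituted; spot-check a few keys.
   docker-compose config | grep -E 'doi_authority|aws_endpoint_url|mailhost'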
From 70c51995f8013051fb81cbc80cf9aa52c33a1228 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Wed, 31 Aug 2022 11:53:38 +0200
Subject: [PATCH 060/354] Update installation.rst
---
doc/installation.rst | 14 +++++++++++---
1 file changed, 11 insertions(+), 3 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 639eb89..35a2a2b 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -2,7 +2,7 @@ dataverse.no installation
=========================
Prerequisites : have sudo rights
-install Prerequisites, docker, docker-compose, and git
+install Prerequisites, docker, docker-compose, and git, azure-cli
.. code-block:: bash
@@ -23,7 +23,7 @@ install Prerequisites, docker, docker-compose, and git
apt-get install docker-ce docker-ce-cli containerd.io
curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
- apt-get install git
+ apt-get install -y git azure-cli
Dataverse root folder
---------------------
@@ -51,6 +51,8 @@ Clone the git
git checkout dataverse.no
cp -r $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH
cp .env_sample .env
+ az login --identity
+ az acr login --name presacrd4oilmd5ss77y
docker network create traefik
The following variables need to be changed in .env
@@ -77,7 +79,7 @@ Counter Processor
.. code-block:: bash
-GEOIPLICENSE=licencekey
+ GEOIPLICENSE=licencekey
Postgres settings
@@ -94,6 +96,12 @@ DOI parameters
doi_username=username
doi_password=password
+AWS
+
+.. code-block:: bash
+
+
+
Certificates installation
-------------------------
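The AWS code block above was left empty; based on the ``.env_sample`` updated earlier in this series, the values that belong there look like the following (bucket name, profile and endpoint are examples):

.. code-block:: bash

   aws_bucket_name=awsbucketname
   aws_s3_profile=cloudian
   aws_endpoint_url=https\:\/\/s3-oslo.educloud.no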
From 89b04df4f753301cf73f4b257d18391ae9b18f65 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Thu, 1 Sep 2022 10:24:43 +0200
Subject: [PATCH 061/354] Update installation.rst
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 35a2a2b..181addb 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -20,7 +20,7 @@ install Prerequisites, docker, docker-compose, and git, azure-cli
$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
apt-get update
- apt-get install docker-ce docker-ce-cli containerd.io
+ apt-get install -y docker-ce docker-ce-cli containerd.io
curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
apt-get install -y git azure-cli
From 32037c723faf6ef887bdaaf0552372bd20c188b8 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Thu, 1 Sep 2022 10:28:23 +0200
Subject: [PATCH 062/354] Update installation.rst
---
doc/installation.rst | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 181addb..d357cd2 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -1,5 +1,5 @@
-dataverse.no installation
-=========================
+Dataverse installation on Microsoft Azure
+=========================================
Prerequisites : have sudo rights
install Prerequisites, docker, docker-compose, and git, azure-cli
@@ -59,8 +59,8 @@ The following variables need to be changed in .env
.. code-block:: bash
- hostname=dataverse.no
- traefikhost=dataverse.no
+ hostname=dataverse.azure.com
+ traefikhost=dataverse.azure.com
main configuration
From 0d091a1581855d937942d42c9560599b2d8c3149 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Thu, 1 Sep 2022 10:52:09 +0200
Subject: [PATCH 063/354] Create envFileSetup
---
doc/envFileSetup | 46 ++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 46 insertions(+)
create mode 100644 doc/envFileSetup
diff --git a/doc/envFileSetup b/doc/envFileSetup
new file mode 100644
index 0000000..f082cff
--- /dev/null
+++ b/doc/envFileSetup
@@ -0,0 +1,46 @@
+The following variables need to be changed in .env depending on your installation
+
+.. code-block:: bash
+
+ hostname=dataverse.azure.com
+ traefikhost=dataverse.azure.com
+
+main configuration
+
+.. code-block:: bash
+
+ DISTRIB=/distrib
+ CONFIGURATION_PATH=/distrib/private
+
+Solr
+
+.. code-block:: bash
+
+ LOCAL_STORAGE=/mntblob
+
+Counter Processor
+
+.. code-block:: bash
+
+ GEOIPLICENSE=licencekey
+
+Postgres settings
+
+.. code-block:: bash
+
+ POSTGRES_PASSWORD=password
+
+
+DOI parameters
+
+.. code-block:: bash
+
+ doi_authority=10.21337
+ doi_username=username
+ doi_password=doiword
+
+AWS
+
+.. code-block:: bash
+
+ aws_bucket_name=bucketName
From f331cc4b38ad5adaa543a27e73446fd85f1840f6 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Thu, 1 Sep 2022 11:00:47 +0200
Subject: [PATCH 064/354] Update installation.rst
---
doc/installation.rst | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index d357cd2..dd6ecaf 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -108,9 +108,13 @@ Certificates installation
Request the certificates from the correct authority
dataverse.pem order:
+
local in file $[hostmame].pem
+
Intermediate in file sectigo-intermediate.pem
+
Root in file sectigo-intermediate.pem
+
To make the certificate pem file ``cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem``
From 9aeda22d489ed9796bc0cc343f2e79d3bff1bacd Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Thu, 1 Sep 2022 11:05:02 +0200
Subject: [PATCH 065/354] Update installation.rst
---
doc/installation.rst | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index dd6ecaf..05c0b8f 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -107,13 +107,13 @@ Certificates installation
Request the certificates from the correct authority
-dataverse.pem order:
+dataverse.no.pem order:
-local in file $[hostmame].pem
+local, in file $[hostname].pem
-Intermediate in file sectigo-intermediate.pem
+Intermediate, in file sectigo-intermediate.pem
-Root in file sectigo-intermediate.pem
+Root, in file sectigo-intermediate.pem
To make the certificate pem file ``cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem``
From 7050fafe0d42472353368cb325853803fc623c4f Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Thu, 1 Sep 2022 11:54:55 +0200
Subject: [PATCH 066/354] Update installation.rst
---
doc/installation.rst | 10 ++++++----
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 05c0b8f..63dbb1b 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -121,7 +121,7 @@ To make the certificate pem file ``cat sectigo-ecc-intermediate.pem >> *dataver
certificates should be put in ``$CONFIGURATION_PATH/configuration/files`` there are 2 files a .pem file and a .key file
-The name of the certificates files should match the name in ``$CONFIGURATION_PATH/configuration/files/certificate.toml``
+The name of the certificates files should match the name in ``$CONFIGURATION_PATH/configuration/files/certificates.toml``
Check the certificates with ``curl -placeholder hostname``
@@ -144,9 +144,11 @@ Set up shibboleth
Copy keyen.sh comand
-
-
-
+Check that your dataverse instalation is axessible
+--------------------------------------------------
+.. code-block:: bash
+cd $DISTRIB/dataverse-docker/
+docker-compose up -d
Cronjob to automatically restart dataverse
------------------------------------------
From cd768f95800d0f86d2ac4cfaf14ccf42f89e9d46 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 2 Sep 2022 07:48:43 +0000
Subject: [PATCH 067/354] Disabled image magic for pdf creation
---
distros/dataverse.no/init.d/012-disable-imageMagick.sh | 3 +++
1 file changed, 3 insertions(+)
create mode 100644 distros/dataverse.no/init.d/012-disable-imageMagick.sh
diff --git a/distros/dataverse.no/init.d/012-disable-imageMagick.sh b/distros/dataverse.no/init.d/012-disable-imageMagick.sh
new file mode 100644
index 0000000..258b87c
--- /dev/null
+++ b/distros/dataverse.no/init.d/012-disable-imageMagick.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+mv /usr/bin/convert /usr/bin/convert.MOVED
+
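The script above sidelines ImageMagick's ``convert`` inside the container so PDF thumbnails are no longer generated; a hedged sketch of how to undo it on a running instance, assuming the Dataverse container is named ``dataverse``:

.. code-block:: bash

   # Restore the original binary (container name is an assumption of this sketch).
   docker exec dataverse mv /usr/bin/convert.MOVED /usr/bin/convert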
From 3eab5346b80d0487ada2e2ef76562f718c84f863 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 2 Sep 2022 09:08:45 +0000
Subject: [PATCH 068/354] renamed certificates file for consistency
---
distros/dataverse.no/docker-compose.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index bbe6658..2be8d72 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -67,8 +67,8 @@ services:
volumes:
- ${CONFIGURATION_PATH}/shibboleth:/etc/shibboleth
- ./configs/http-ssl.conf:/etc/httpd/conf.d/ssl.conf
- - ${CONFIGURATION_PATH}/configuration/files/${hostname}.pem:/etc/pki/tls/certs/localhost.crt
- - ${CONFIGURATION_PATH}/configuration/files/${hostname}.key:/etc/pki/tls/private/localhost.key
+ - ${CONFIGURATION_PATH}/configuration/files/localhost.pem:/etc/pki/tls/certs/localhost.crt
+ - ${CONFIGURATION_PATH}/configuration/files/localhost.key:/etc/pki/tls/private/localhost.key
hostname: ${hostname}
labels:
- "traefik.enable=true"
From 18dd7aaf8317c278a8e29a83ca67085cb9f0f17c Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 11:58:10 +0200
Subject: [PATCH 069/354] Create shibbotheth.rst
---
doc/shibbotheth.rst | 13 +++++++++++++
1 file changed, 13 insertions(+)
create mode 100644 doc/shibbotheth.rst
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
new file mode 100644
index 0000000..e1eb1d9
--- /dev/null
+++ b/doc/shibbotheth.rst
@@ -0,0 +1,13 @@
+Shibboleth
+==========
+
+Assuming a working Shibboleth configuration in ``/tmp/shibboleth.tar.gz``
+
+Copy and extract the files to the proper location
+
+.. code-block:: bash
+
+ mv /tmp/shibboleth /$DISTRIB/private
+ cd /$DISTRIB/private
+ tar -xvf shibboleth.tar.gz .
+
From c9ef6b3898ee7e7b7e06be4aef1ae4e89b74f7cc Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 12:00:20 +0200
Subject: [PATCH 070/354] Update shibbotheth.rst
---
doc/shibbotheth.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
index e1eb1d9..31963bc 100644
--- a/doc/shibbotheth.rst
+++ b/doc/shibbotheth.rst
@@ -11,3 +11,5 @@ Copy and extract the files to the proper location
cd /$DISTRIB/private
tar -xvf shibboleth.tar.gz .
+Change hostname identity in ``shiboleth2.xm``
+you are done (mostely)
From 240ffe4212995d61f74603bf267116dcef6cdb70 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 12:08:40 +0200
Subject: [PATCH 071/354] Update shibbotheth.rst
---
doc/shibbotheth.rst | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
index 31963bc..b6b9691 100644
--- a/doc/shibbotheth.rst
+++ b/doc/shibbotheth.rst
@@ -11,5 +11,6 @@ Copy and extract the files to the proper location
cd /$DISTRIB/private
tar -xvf shibboleth.tar.gz .
-Change hostname identity in ``shiboleth2.xm``
+Change hostname ``entityID`` in ``shiboleth2.xm``
+Change SMAL service provider location
you are done (mostely)
From 8e039a5825f76c25424886edcf7492fcd3682e04 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 12:17:22 +0200
Subject: [PATCH 072/354] Update shibbotheth.rst
---
doc/shibbotheth.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
index b6b9691..a190664 100644
--- a/doc/shibbotheth.rst
+++ b/doc/shibbotheth.rst
@@ -11,6 +11,6 @@ Copy and extract the files to the proper location
cd /$DISTRIB/private
tar -xvf shibboleth.tar.gz .
-Change hostname ``entityID`` in ``shiboleth2.xm``
+Change hostname ``entityID`` in ``shibboleth2.xm``
Change SMAL service provider location
you are done (mostely)
From 1e59066938c4d9e2838d23719d0db64359d30123 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 12:21:17 +0200
Subject: [PATCH 073/354] Update shibbotheth.rst
---
doc/shibbotheth.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
index a190664..4e580bf 100644
--- a/doc/shibbotheth.rst
+++ b/doc/shibbotheth.rst
@@ -7,8 +7,8 @@ Copy and extract the files to the proper location
.. code-block:: bash
- mv /tmp/shibboleth /$DISTRIB/private
- cd /$DISTRIB/private
+ mv /tmp/shibboleth $DISTRIB/private
+ cd $DISTRIB/private
tar -xvf shibboleth.tar.gz .
Change hostname ``entityID`` in ``shibboleth2.xm``
From 8579f0efdda6c994a47445ffe83b16b06cf7ba73 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 12:22:10 +0200
Subject: [PATCH 074/354] Update shibbotheth.rst
---
doc/shibbotheth.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
index 4e580bf..e673043 100644
--- a/doc/shibbotheth.rst
+++ b/doc/shibbotheth.rst
@@ -7,7 +7,7 @@ Copy and extract the files to the proper location
.. code-block:: bash
- mv /tmp/shibboleth $DISTRIB/private
+ mv /tmp/shibboleth* $DISTRIB/private
cd $DISTRIB/private
tar -xvf shibboleth.tar.gz .
From d345225a0f0b7447c3ecb1226e89a9f9ea7c0150 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 12:26:42 +0200
Subject: [PATCH 075/354] Update shibbotheth.rst
---
doc/shibbotheth.rst | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
index e673043..02eae40 100644
--- a/doc/shibbotheth.rst
+++ b/doc/shibbotheth.rst
@@ -7,9 +7,10 @@ Copy and extract the files to the proper location
.. code-block:: bash
- mv /tmp/shibboleth* $DISTRIB/private
- cd $DISTRIB/private
- tar -xvf shibboleth.tar.gz .
+ export DISTRIB=/distrib
+ mv /tmp/shibboleth* $DISTRIB/private
+ cd $DISTRIB/private
+ tar -xvf shibboleth.tar.gz .
Change hostname ``entityID`` in ``shibboleth2.xm``
Change SMAL service provider location
From c67123bf3499155261163a2e9e02a8b86138a5bc Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 12:27:05 +0200
Subject: [PATCH 076/354] Update shibbotheth.rst
---
doc/shibbotheth.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
index 02eae40..636c86c 100644
--- a/doc/shibbotheth.rst
+++ b/doc/shibbotheth.rst
@@ -8,7 +8,7 @@ Copy and extract the files to the proper location
.. code-block:: bash
export DISTRIB=/distrib
- mv /tmp/shibboleth* $DISTRIB/private
+ cp /tmp/shibboleth* $DISTRIB/private
cd $DISTRIB/private
tar -xvf shibboleth.tar.gz .
From 50698a7bfd5a22318371dc4261db49a0af6ec551 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 12:31:13 +0200
Subject: [PATCH 077/354] Update shibbotheth.rst
---
doc/shibbotheth.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
index 636c86c..81a210a 100644
--- a/doc/shibbotheth.rst
+++ b/doc/shibbotheth.rst
@@ -10,7 +10,7 @@ Copy and extract the files to the proper location
export DISTRIB=/distrib
cp /tmp/shibboleth* $DISTRIB/private
cd $DISTRIB/private
- tar -xvf shibboleth.tar.gz .
+ tar -xvf shibboleth.tar.gz
Change hostname ``entityID`` in ``shibboleth2.xm``
Change SMAL service provider location
From 43c68a0629656d266c9826840fd0e87ee9cc276a Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 12:32:24 +0200
Subject: [PATCH 078/354] Update shibbotheth.rst
---
doc/shibbotheth.rst | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
index 81a210a..5cb173f 100644
--- a/doc/shibbotheth.rst
+++ b/doc/shibbotheth.rst
@@ -11,7 +11,8 @@ Copy and extract the files to the proper location
cp /tmp/shibboleth* $DISTRIB/private
cd $DISTRIB/private
tar -xvf shibboleth.tar.gz
+ cd $DISTRIB/private/shibboleth
-Change hostname ``entityID`` in ``shibboleth2.xm``
+Change hostname ``entityID`` in ``shibboleth2.xml``
Change SMAL service provider location
you are done (mostely)
From 21887d6a264d002cd64fb7a955d701cba2c6d5ac Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 2 Sep 2022 12:34:55 +0200
Subject: [PATCH 079/354] Update shibbotheth.rst
---
doc/shibbotheth.rst | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst
index 5cb173f..3643369 100644
--- a/doc/shibbotheth.rst
+++ b/doc/shibbotheth.rst
@@ -13,6 +13,8 @@ Copy and extract the files to the proper location
tar -xvf shibboleth.tar.gz
cd $DISTRIB/private/shibboleth
-Change hostname ``entityID`` in ``shibboleth2.xml``
-Change SMAL service provider location
+Change the domain name in ``entityID`` in ``shibboleth2.xml`` to the domain name of the installation
+
+Change the SAML service provider location
+
you are done (mostly)
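A hedged example of the ``entityID`` change, assuming the archive still carries a test host name (the actual original value depends on the archive you received):

.. code-block:: bash

   cd $DISTRIB/private/shibboleth
   # Old and new values are illustrative; adjust them to your installation.
   sed -i 's#entityID="https://test-docker.dataverse.no/shibboleth"#entityID="https://dataverse.no/shibboleth"#' shibboleth2.xml
   grep entityID shibboleth2.xml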
From 19b1a2e3b9447f30cd0c87a45380290580c86dd0 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 12:41:23 +0200
Subject: [PATCH 080/354] functionality validation how to
---
doc/functionalityValidation | 40 +++++++++++++++++++++++++++++++++++++
1 file changed, 40 insertions(+)
create mode 100644 doc/functionalityValidation
diff --git a/doc/functionalityValidation b/doc/functionalityValidation
new file mode 100644
index 0000000..33d6395
--- /dev/null
+++ b/doc/functionalityValidation
@@ -0,0 +1,40 @@
+mail relay
+----------
+ in ``.en``
+ ~~~~~~~~~~
+
+Set ``system_email=``
+
+
+set the smtp relay
+
+.. code-block:: bash
+
+ mailhost=smtp-relay.exemple.com
+ mailuser=no-reply@dataverse.no
+ no_reply_email=no-reply@dataverse.no
+ smtp_password=password
+ smtp_port=465
+ socket_port=465
+
+doi settings
+------------
+
+feide authentication
+--------------------
+
+local storage
+-------------
+
+S3 support
+----------
+
+large files
+-----------
+
+
+counter processor
+-----------------
+
+custom settings
+---------------
From 8000d96b32dc2a098cae3a42078f978a328487ef Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 12:43:10 +0200
Subject: [PATCH 081/354] Update functionalityValidation
---
doc/functionalityValidation | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/doc/functionalityValidation b/doc/functionalityValidation
index 33d6395..bea2650 100644
--- a/doc/functionalityValidation
+++ b/doc/functionalityValidation
@@ -1,3 +1,11 @@
+Default admin login
+-------------------
+
+username : dataverseAdmin
+
+password : admin
+
+
mail relay
----------
in ``.en``
From 62b680e98bb58f6d2093f3a2d3fd1b905e185a04 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 12:43:25 +0200
Subject: [PATCH 082/354] Rename functionalityValidation to
functionalityValidation.rst
---
doc/{functionalityValidation => functionalityValidation.rst} | 0
1 file changed, 0 insertions(+), 0 deletions(-)
rename doc/{functionalityValidation => functionalityValidation.rst} (100%)
diff --git a/doc/functionalityValidation b/doc/functionalityValidation.rst
similarity index 100%
rename from doc/functionalityValidation
rename to doc/functionalityValidation.rst
From e3d1ef04ab5a773a073eecc036304b564332e3a9 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 12:48:44 +0200
Subject: [PATCH 083/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 14 ++++++++++++++
1 file changed, 14 insertions(+)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index bea2650..6c15d92 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -6,6 +6,15 @@ username : dataverseAdmin
password : admin
+S3 storage
+----------
+
+For testting purpuses S3 storage fuctionality can be disabled using :
+
+``mv /distrib/dataverse-docker/distros/dataverse.no/init.d/0*s3*.sh /tmp/``
+
+then restart dataverse
+
mail relay
----------
in ``.en``
@@ -37,6 +46,11 @@ local storage
S3 support
----------
+if S3 storage was disabled, re-enable it using:
+
+
+``mv /tmp/0*s3*.sh /distrib/dataverse-docker/distros/dataverse.no/init.d/``
+
large files
-----------
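A hedged end-to-end sketch of the disable/re-enable cycle described above, using the default paths from these docs:

.. code-block:: bash

   # Disable S3 for testing: park the S3 init scripts, then restart the stack.
   mv /distrib/dataverse-docker/distros/dataverse.no/init.d/0*s3*.sh /tmp/
   cd /distrib/dataverse-docker && docker-compose down && docker-compose up -d

   # Re-enable S3 afterwards and restart again.
   mv /tmp/0*s3*.sh /distrib/dataverse-docker/distros/dataverse.no/init.d/
   cd /distrib/dataverse-docker && docker-compose down && docker-compose up -d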
From c4f6d6cf487bbaf882afd0dd63a8114f4424676b Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 12:49:06 +0200
Subject: [PATCH 084/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index 6c15d92..29ec158 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -17,6 +17,7 @@ then restart dataverse
mail relay
----------
+
in ``.en``
~~~~~~~~~~
From ecce12de7241e63c6918617e1eab6205ca28f730 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 13:15:03 +0200
Subject: [PATCH 085/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 10 ++++++++--
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index 29ec158..fa0bd24 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -18,11 +18,12 @@ then restart dataverse
mail relay
----------
- in ``.en``
- ~~~~~~~~~~
+ in ``.env``
Set ``system_email=``
+for exemple ``stem_email=000xxx@uit.no``
+
set the smtp relay
@@ -35,6 +36,11 @@ set the smtp relay
smtp_port=465
socket_port=465
+in the web interface
+¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨
+Change the administrator email at https:///dataverseuser.xhtml?selectTab=accountInfo
+
+
doi settings
------------
From 46b9258225f20a7d9cf372c517992421d19571dd Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 13:15:32 +0200
Subject: [PATCH 086/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index fa0bd24..704c18c 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -37,7 +37,7 @@ set the smtp relay
socket_port=465
in the web interface
-¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨¨
+~~~~~~~~~~~~~~~~~~~~
Change the administrator email at https:///dataverseuser.xhtml?selectTab=accountInfo
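Before relying on the "Verify email" button, the relay settings can be sanity-checked from the host; a hedged example, assuming implicit TLS on port 465 as configured above (the relay host name is the placeholder from the example):

.. code-block:: bash

   # Prints the certificate chain the relay presents if 465/TLS is reachable.
   openssl s_client -connect smtp-relay.exemple.com:465 </dev/null | head -n 20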
From d355f8db25b7fc61f89e44e097ed5d68098e18f2 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 13:16:16 +0200
Subject: [PATCH 087/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index 704c18c..339d649 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -18,7 +18,8 @@ then restart dataverse
mail relay
----------
- in ``.env``
+in ``.env``
+~~~~~~~~~~~
Set ``system_email=``
From 9c0ab98ffaad3c471e8939824cb9301cf7e096ad Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 13:17:15 +0200
Subject: [PATCH 088/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index 339d649..7a18be2 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -9,7 +9,7 @@ password : admin
S3 storage
----------
-For testting purpuses S3 storage fuctionality can be disabled using :
+For testing purposes S3 storage functionality can be disabled using:
``mv /distrib/dataverse-docker/distros/dataverse.no/init.d/0*s3*.sh /tmp/``
@@ -23,7 +23,7 @@ in ``.env``
Set ``system_email=``
-for exemple ``stem_email=000xxx@uit.no``
+for example ``stem_email=000xxx@uit.no``
set the smtp relay
From 709301b49a900cce5a564dda884516cc7d4e90ed Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 13:17:39 +0200
Subject: [PATCH 089/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index 7a18be2..5d946b5 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -23,7 +23,7 @@ in ``.env``
Set ``system_email=``
-for example ``stem_email=000xxx@uit.no``
+for example ``system_email=000xxx@uit.no``
set the smtp relay
From 35253d6a0278e789b00f82259766088ec1937dee Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 13:20:22 +0200
Subject: [PATCH 090/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index 5d946b5..7984dd5 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -40,6 +40,7 @@ set the smtp relay
in the web interface
~~~~~~~~~~~~~~~~~~~~
Change the administrator email at https:///dataverseuser.xhtml?selectTab=accountInfo
+the "Verify email" button should send en email.
doi settings
From 999f9e2adf4c4d4dfa74a9d9e8f4f22520375da0 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 13:20:49 +0200
Subject: [PATCH 091/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index 7984dd5..f3931c4 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -40,8 +40,8 @@ set the smtp relay
in the web interface
~~~~~~~~~~~~~~~~~~~~
Change the administrator email at https:///dataverseuser.xhtml?selectTab=accountInfo
-the "Verify email" button should send en email.
+the "Verify email" button should send an email.
doi settings
------------
From 84f164f2a4ad6d6a0b711d3c8c23a0ebb3e55206 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 13:55:13 +0200
Subject: [PATCH 092/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 14 ++++++++++++++
1 file changed, 14 insertions(+)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index f3931c4..224c1b3 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -46,6 +46,20 @@ the "Verify email" button should send en email.
doi settings
------------
+in ``.env``
+~~~~~~~~~~~
+
+set doi configuration
+
+.. code-block:: bash
+
+in ``/secrets``
+~~~~~~~~~~~~~~~~
+
+set the password in ``$DISTRIB/private/secrets/doi_asadmin``
+``AS_ADMIN_ALIASPASSWORD=changeme``
+
+
feide authentication
--------------------
From d78c47d9d35632b07108523e91548c96ee68369f Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Mon, 5 Sep 2022 14:00:11 +0200
Subject: [PATCH 093/354] Update functionalityValidation.rst
---
doc/functionalityValidation.rst | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst
index 224c1b3..3b297aa 100644
--- a/doc/functionalityValidation.rst
+++ b/doc/functionalityValidation.rst
@@ -57,7 +57,12 @@ in ``/secrets``
~~~~~~~~~~~~~~~~
set the password in ``$DISTRIB/private/secrets/doi_asadmin``
-``AS_ADMIN_ALIASPASSWORD=changeme``
+
+for example with "changeme" as a password ``AS_ADMIN_ALIASPASSWORD=changeme``
+
+set the password in ``$DISTRIB/private/secrets/doi/password``
+
+for example with "changeme" as a password ``changeme``
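The empty ``.env`` code block above corresponds to the ``doi_*`` variables from ``.env_sample``; a hedged sketch of filling in both the variables and the two secret files described here (all passwords are placeholders):

.. code-block:: bash

   # .env (use the real DataCite credentials)
   #   doi_authority=10.21337
   #   doi_username=username
   #   doi_password=password

   # Secret expected in $DISTRIB/private/secrets/doi_asadmin, per the doc above.
   echo 'AS_ADMIN_ALIASPASSWORD=changeme' > $DISTRIB/private/secrets/doi_asadmin

   # Plain password file under secrets/doi/.
   mkdir -p $DISTRIB/private/secrets/doi
   echo 'changeme' > $DISTRIB/private/secrets/doi/password
   chmod 600 $DISTRIB/private/secrets/doi_asadmin $DISTRIB/private/secrets/doi/password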
feide authentication
From f15303540d9c6d2cc622d39e774a08a303e9a7ab Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 7 Sep 2022 07:40:01 +0000
Subject: [PATCH 094/354] achitecture for the aws bucket keys
---
secrets/aws-cli/.aws/config | 13 +++++++++++++
secrets/aws-cli/.aws/credentials | 7 +++++++
secrets/aws-cli/aws-list.sh | 2 ++
3 files changed, 22 insertions(+)
create mode 100755 secrets/aws-cli/.aws/config
create mode 100755 secrets/aws-cli/.aws/credentials
create mode 100755 secrets/aws-cli/aws-list.sh
diff --git a/secrets/aws-cli/.aws/config b/secrets/aws-cli/.aws/config
new file mode 100755
index 0000000..5a6050e
--- /dev/null
+++ b/secrets/aws-cli/.aws/config
@@ -0,0 +1,13 @@
+[cloudian]
+output = json
+region =
+
+[uit]
+output = json
+region =
+
+[default]
+output = json
+region =
+
+
diff --git a/secrets/aws-cli/.aws/credentials b/secrets/aws-cli/.aws/credentials
new file mode 100755
index 0000000..c1f1cce
--- /dev/null
+++ b/secrets/aws-cli/.aws/credentials
@@ -0,0 +1,7 @@
+[cloudian]
+aws_access_key_id=
+aws_secret_access_key=
+
+[uit]
+aws_access_key_id=
+aws_secret_access_key=
diff --git a/secrets/aws-cli/aws-list.sh b/secrets/aws-cli/aws-list.sh
new file mode 100755
index 0000000..3afa73c
--- /dev/null
+++ b/secrets/aws-cli/aws-list.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+aws --endpoint-url https:// s3api list-objects-v2 --bucket
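With the credentials and region filled in, the template command above can be exercised like this; profile and endpoint follow the ``.env_sample`` values from earlier patches, the bucket name is illustrative, and the paths assume the secrets were copied to ``/distrib/private`` as in the installation doc:

.. code-block:: bash

   export AWS_CONFIG_FILE=/distrib/private/secrets/aws-cli/.aws/config
   export AWS_SHARED_CREDENTIALS_FILE=/distrib/private/secrets/aws-cli/.aws/credentials
   aws --profile cloudian --endpoint-url https://s3-oslo.educloud.no \
       s3api list-objects-v2 --bucket awsbucketname --max-items 10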
Date: Wed, 7 Sep 2022 09:10:59 +0000
Subject: [PATCH 095/354] openid template in secrets
---
secrets/openid.json | 9 +++++++++
1 file changed, 9 insertions(+)
create mode 100644 secrets/openid.json
diff --git a/secrets/openid.json b/secrets/openid.json
new file mode 100644
index 0000000..87449f1
--- /dev/null
+++ b/secrets/openid.json
@@ -0,0 +1,9 @@
+{
+ "id":"feide",
+ "factoryAlias":"oidc",
+ "title":"edugain",
+ "subtitle":"",
+ "factoryData":"type: oidc | issuer: https://auth.dataporten.no | clientId: | clientSecret:",
+ "enabled":true
+}
+
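Once ``clientId`` and ``clientSecret`` are filled in, this template is presumably what ``08-federated-login.sh`` loads; a hedged sketch using the standard Dataverse admin API:

.. code-block:: bash

   # Register (or re-register) the OIDC provider from the template file.
   curl -X POST -H 'Content-type: application/json' \
        -d @/secrets/openid.json \
        http://localhost:8080/api/admin/authenticationProviders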
From 111dae2fe0ecd807b441ea380987a338ec0d0916 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Thu, 8 Sep 2022 13:46:33 +0200
Subject: [PATCH 096/354] Create prerequisitResourses
---
doc/prerequisitResourses | 7 +++++++
1 file changed, 7 insertions(+)
create mode 100644 doc/prerequisitResourses
diff --git a/doc/prerequisitResourses b/doc/prerequisitResourses
new file mode 100644
index 0000000..777ae62
--- /dev/null
+++ b/doc/prerequisitResourses
@@ -0,0 +1,7 @@
+in ``/ressourse``
+
+SSL certificate ``/resourse/private/secrets/configuration``
+
+Database ``/resourse/private/database-data.dump''
+
+Docroot``/resourse/docroot``
From e48dd93cb486d075a37c20f57fd1362c5132ff18 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 09:36:14 +0200
Subject: [PATCH 097/354] Rename prerequisitResourses to
prerequisitsResourses.rts
---
doc/{prerequisitResourses => prerequisitsResourses.rts} | 0
1 file changed, 0 insertions(+), 0 deletions(-)
rename doc/{prerequisitResourses => prerequisitsResourses.rts} (100%)
diff --git a/doc/prerequisitResourses b/doc/prerequisitsResourses.rts
similarity index 100%
rename from doc/prerequisitResourses
rename to doc/prerequisitsResourses.rts
From 3283acd72e3d5d9420cc353cf491e68cf15a54c1 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 09:36:50 +0200
Subject: [PATCH 098/354] Rename prerequisitsResourses.rts to
prerequisitsResourses.rst
---
doc/{prerequisitsResourses.rts => prerequisitsResourses.rst} | 0
1 file changed, 0 insertions(+), 0 deletions(-)
rename doc/{prerequisitsResourses.rts => prerequisitsResourses.rst} (100%)
diff --git a/doc/prerequisitsResourses.rts b/doc/prerequisitsResourses.rst
similarity index 100%
rename from doc/prerequisitsResourses.rts
rename to doc/prerequisitsResourses.rst
From d6e40c6c14c28f6b5255ddb0d2c7b748477daecc Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 09:42:39 +0200
Subject: [PATCH 099/354] Update prerequisitsResourses.rst
---
doc/prerequisitsResourses.rst | 11 +++++++++--
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/doc/prerequisitsResourses.rst b/doc/prerequisitsResourses.rst
index 777ae62..52ce2aa 100644
--- a/doc/prerequisitsResourses.rst
+++ b/doc/prerequisitsResourses.rst
@@ -1,7 +1,14 @@
+
in ``/ressourse``
-SSL certificate ``/resourse/private/secrets/configuration``
+SSL certificate ``/resourse/private/configuration``
+
+Database ``/resourse/private/database-data``
-Database ``/resourse/private/database-data.dump''
+Secrets ``/resourse/private/secrets``
Docroot``/resourse/docroot``
+
+.. code-block:: bash
+
+ cp /resourse/* $DISTRIB/
From e6c95431d41d978a1d9047312e19340e29d10ba1 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 09:42:59 +0200
Subject: [PATCH 100/354] Update prerequisitsResourses.rst
---
doc/prerequisitsResourses.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/prerequisitsResourses.rst b/doc/prerequisitsResourses.rst
index 52ce2aa..5f5a3f4 100644
--- a/doc/prerequisitsResourses.rst
+++ b/doc/prerequisitsResourses.rst
@@ -7,7 +7,7 @@ Database ``/resourse/private/database-data``
Secrets ``/resourse/private/secrets``
-Docroot``/resourse/docroot``
+Docroot ``/resourse/docroot``
.. code-block:: bash
From b8368b7d9a549f56d6e80eb1222731a5f8a3cccf Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 09:47:33 +0200
Subject: [PATCH 101/354] Create testing.rst
---
doc/testing.rst | 1 +
1 file changed, 1 insertion(+)
create mode 100644 doc/testing.rst
diff --git a/doc/testing.rst b/doc/testing.rst
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/doc/testing.rst
@@ -0,0 +1 @@
+
From 51b302b3d67b813e3db6501d9884932bdbf0603d Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 09:56:25 +0200
Subject: [PATCH 102/354] Update installation.rst
---
doc/installation.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index 63dbb1b..09d9476 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -55,6 +55,13 @@ Clone the git
az acr login --name presacrd4oilmd5ss77y
docker network create traefik
+if using pre-made resourses archive
+.. code-block:: bash
+ tar -xvf resourse.tar $DISTRIB/
+ cp $DISTRIB/private/secrets/.env $DISTRIB/dataverse-docker/.env
+
+skip to cronjob instalation step
+
The following variables need to be changed in .env
.. code-block:: bash
From 55a997ca9c37df82196a4c487fe157b3b3a05ef4 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 10:26:04 +0200
Subject: [PATCH 103/354] Update installation.rst
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 09d9476..baba9e6 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -57,8 +57,8 @@ Clone the git
if using pre-made resourses archive
.. code-block:: bash
- tar -xvf resourse.tar $DISTRIB/
- cp $DISTRIB/private/secrets/.env $DISTRIB/dataverse-docker/.env
+ tar -xvzf /tmp/resourse.tar $DISTRIB/
+ cp $DISTRIB/.env $DISTRIB/dataverse-docker/.env
skip to cronjob instalation step
From 358a1b35fd45effa8a351031265249e9869a949b Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 10:29:05 +0200
Subject: [PATCH 104/354] Update installation.rst
---
doc/installation.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index baba9e6..79a952e 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -56,7 +56,9 @@ Clone the git
docker network create traefik
if using pre-made resourses archive
+
.. code-block:: bash
+
tar -xvzf /tmp/resourse.tar $DISTRIB/
cp $DISTRIB/.env $DISTRIB/dataverse-docker/.env
From 9d93431b9ac7ddee90a1e5e68fc903b279663975 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 10:56:24 +0200
Subject: [PATCH 105/354] Update installation.rst
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 79a952e..51d647f 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -60,7 +60,7 @@ if using pre-made resourses archive
.. code-block:: bash
tar -xvzf /tmp/resourse.tar $DISTRIB/
- cp $DISTRIB/.env $DISTRIB/dataverse-docker/.env
+ cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/
skip to cronjob instalation step
From 8da851bf390fcc67ed4ce03452cb2a29243b949f Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 11:09:09 +0200
Subject: [PATCH 106/354] Update installation.rst
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 51d647f..c13274e 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -147,9 +147,9 @@ Apache and shibboleth configuration
-----------------------------------
Apache configuration
-Change domain name
+Change domain name in
-Set up shibboleth
+Set up shibboleth ``shibboleth/shibboleth2.xml``
Copy keyen.sh comand
From da83e024048e15d23c580b8bde3ee1ec1f16d437 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 11:29:00 +0200
Subject: [PATCH 107/354] Update prerequisitsResourses.rst
---
doc/prerequisitsResourses.rst | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/doc/prerequisitsResourses.rst b/doc/prerequisitsResourses.rst
index 5f5a3f4..afdfd9b 100644
--- a/doc/prerequisitsResourses.rst
+++ b/doc/prerequisitsResourses.rst
@@ -7,8 +7,14 @@ Database ``/resourse/private/database-data``
Secrets ``/resourse/private/secrets``
+Shibboleth ``/resourse/private/shibboleth``
+
+env ``/resourse/private/.env``
+
Docroot ``/resourse/docroot``
+
+
.. code-block:: bash
cp /resourse/* $DISTRIB/
From 5c7e9485914e6106b795a2af5baa3972f0c2c1da Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 11:33:58 +0200
Subject: [PATCH 108/354] Update installation.rst
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index c13274e..201a2e0 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -58,8 +58,8 @@ Clone the git
if using pre-made resourses archive
.. code-block:: bash
-
- tar -xvzf /tmp/resourse.tar $DISTRIB/
+ cd /$DISTRIB
+ tar -xvzf /tmp/resourse.tar.gz
cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/
skip to cronjob instalation step
From a23a0e11c501a1ef5536472ddb4ff6077e24515c Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 11:36:14 +0200
Subject: [PATCH 109/354] Update installation.rst
---
doc/installation.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index 201a2e0..f8dfa99 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -58,6 +58,7 @@ Clone the git
if using pre-made resourses archive
.. code-block:: bash
+
cd /$DISTRIB
tar -xvzf /tmp/resourse.tar.gz
cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/
From 6586fb8d145f954f3a3cbc14f5ab4fb962e59b4e Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 11:36:55 +0200
Subject: [PATCH 110/354] Update installation.rst
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index f8dfa99..fc9f64f 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -60,7 +60,7 @@ if using pre-made resourses archive
.. code-block:: bash
cd /$DISTRIB
- tar -xvzf /tmp/resourse.tar.gz
+ tar -xvzf /tmp/resourses.tar.gz
cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/
skip to cronjob instalation step
From 5fa6b59abc9f456efcd5f31984d4570b7a90beb2 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 11:54:18 +0200
Subject: [PATCH 111/354] Update installation.rst
---
doc/installation.rst | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index fc9f64f..5f2a09e 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -63,7 +63,7 @@ if using pre-made resourses archive
tar -xvzf /tmp/resourses.tar.gz
cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/
-skip to cronjob instalation step
+skip to checking step
The following variables need to be changed in .env
@@ -157,8 +157,9 @@ Copy keyen.sh comand
Check that your dataverse instalation is axessible
--------------------------------------------------
.. code-block:: bash
-cd $DISTRIB/dataverse-docker/
-docker-compose up -d
+
+ cd $DISTRIB/dataverse-docker/
+ docker-compose up -d
Cronjob to automatically restart dataverse
------------------------------------------
From d2e5cc4dadaec456ec9e39de96d0fd1219b695ce Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 11:57:09 +0200
Subject: [PATCH 112/354] Update installation.rst
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 5f2a09e..639fc81 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -63,7 +63,7 @@ if using pre-made resourses archive
tar -xvzf /tmp/resourses.tar.gz
cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/
-skip to checking step
+go to "Check that your dataverse installation is accessible"
The following variables need to be changed in .env
@@ -154,7 +154,7 @@ Set up shibboleth ``shibboleth/shibboleth2.xml``
Copy keyen.sh comand
-Check that your dataverse instalation is axessible
+Check that your dataverse instalation is accessible
--------------------------------------------------
.. code-block:: bash
From 4da30ee69f0394a3c1413b3dd57a1782c8c3ebd5 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 9 Sep 2022 11:57:53 +0200
Subject: [PATCH 113/354] Update installation.rst
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 639fc81..ed63191 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -154,8 +154,8 @@ Set up shibboleth ``shibboleth/shibboleth2.xml``
Copy keyen.sh comand
-Check that your dataverse instalation is accessible
---------------------------------------------------
+Check that your dataverse installation is accessible
+----------------------------------------------------
.. code-block:: bash
cd $DISTRIB/dataverse-docker/
From a809dee859dcd857a5c02d5533c489dc7e5aef2d Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 16 Sep 2022 10:10:52 +0200
Subject: [PATCH 114/354] Create maintenance.rst
---
doc/maintenance.rst | 96 +++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 96 insertions(+)
create mode 100644 doc/maintenance.rst
diff --git a/doc/maintenance.rst b/doc/maintenance.rst
new file mode 100644
index 0000000..e1f531f
--- /dev/null
+++ b/doc/maintenance.rst
@@ -0,0 +1,96 @@
+Create dump of production database
+----------------------------------
+
+connect using ssh to production VM
+
+if you are using a dockerized version: ``docker exec -it postgres /bin/sh``
+
+.. code-block:: bash
+
+ su postgres
+ pg_dump -U dataverse dataverse > /tmp/dataverse.dump;
+
+Transmit dump file to appropriate vm using rsync ``rsync -arvzP --rsh=ssh
+ Test and Demo only
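The ``rsync`` command above is cut off in this excerpt; a hedged completion, with the destination host as an obvious placeholder:

.. code-block:: bash

   # Copy the dump to the target VM (destination is illustrative).
   rsync -arvzP --rsh=ssh /tmp/dataverse.dump user@target-vm:/tmp/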
From c3e0b27fd64d7fae28ac3d80e9b30bd5776af1c3 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 3 Jan 2023 07:34:02 +0000
Subject: [PATCH 161/354] fixed curation lable (special char not allowed)
---
distros/dataverse.no/init.d/024-curation-lables.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/024-curation-lables.sh b/distros/dataverse.no/init.d/024-curation-lables.sh
index ae4278e..0fa922c 100755
--- a/distros/dataverse.no/init.d/024-curation-lables.sh
+++ b/distros/dataverse.no/init.d/024-curation-lables.sh
@@ -1,2 +1,2 @@
#!/bin/bash
-curl -X PUT -d '{"Standard Process":["Curator Assigned", "In Curation", "Awaiting Reply", "Legal/Ethical Concerns", "Awaiting Final Approval", "In Double-Blind Review", "Awaiting Article Publication", "Candidate for Deletion"], "Alternate Process":["State 1","State 2","State 3"]}' http://localhost:8080/api/admin/settings/:AllowedCurationLabels
+curl -X PUT -d '{"Standard Process":["Curator Assigned", "In Curation", "Awaiting Reply", "Legal or Ethical Concerns", "Awaiting Final Approval", "In Double Blind Review", "Awaiting Article Publication", "Candidate for Deletion"], "Alternate Process":["State 1","State 2","State 3"]}' http://localhost:8080/api/admin/settings/:AllowedCurationLabels
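After the init script has run, the setting can be read back through the standard settings API to confirm the labels were accepted:

.. code-block:: bash

   curl http://localhost:8080/api/admin/settings/:AllowedCurationLabels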
From 9eff5e4a0c7f47cc8fdc80ad7f03ec03c4d32028 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 4 Jan 2023 15:05:27 +0000
Subject: [PATCH 162/354] custom header/footer
---
.../init.d/101-header-footer-custumisation.sh | 8 +++
.../modification/custom-footer.html | 69 +++++++++++++++++++
.../modification/custom-header.html | 33 +++++++++
3 files changed, 110 insertions(+)
create mode 100755 distros/dataverse.no/init.d/101-header-footer-custumisation.sh
create mode 100644 distros/dataverse.no/modification/custom-footer.html
create mode 100644 distros/dataverse.no/modification/custom-header.html
diff --git a/distros/dataverse.no/init.d/101-header-footer-custumisation.sh b/distros/dataverse.no/init.d/101-header-footer-custumisation.sh
new file mode 100755
index 0000000..c4fb950
--- /dev/null
+++ b/distros/dataverse.no/init.d/101-header-footer-custumisation.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/dataverse_header.xhtml -O /tmp/custom-header.html
+wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/dataverse_footer.xhtml -O /tmp/custom-footer.html
+
+curl -X PUT -d '/logos/navbar/logo.png' http://localhost:8080/api/admin/settings/:LogoCustomizationFile
+curl -X PUT -d '/tmp/custom-header.html' http://localhost:8080/api/admin/settings/:HeaderCustomizationFile
+curl -X PUT -d '/tmp/custom-footer.html' http://localhost:8080/api/admin/settings/:FooterCustomizationFile
diff --git a/distros/dataverse.no/modification/custom-footer.html b/distros/dataverse.no/modification/custom-footer.html
new file mode 100644
index 0000000..ef74808
--- /dev/null
+++ b/distros/dataverse.no/modification/custom-footer.html
@@ -0,0 +1,69 @@
+
+
diff --git a/distros/dataverse.no/modification/custom-header.html b/distros/dataverse.no/modification/custom-header.html
new file mode 100644
index 0000000..7789e2a
--- /dev/null
+++ b/distros/dataverse.no/modification/custom-header.html
@@ -0,0 +1,33 @@
+
+
Test and Demo only
From 8c189ea825a45050ae042010e14da16cdafcd31a Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 6 Jan 2023 13:39:17 +0000
Subject: [PATCH 165/354] updated color footer
---
distros/dataverse.no/modification/custom-header.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/modification/custom-header.html b/distros/dataverse.no/modification/custom-header.html
index 1899cb1..dc606a2 100644
--- a/distros/dataverse.no/modification/custom-header.html
+++ b/distros/dataverse.no/modification/custom-header.html
@@ -11,7 +11,7 @@
/* Place narrow custom header at the top of the page... */
#custom-header {
- background: #414141;
+ background: #FBFBFB;
min-height: 44px;
position: fixed;
right: 0;
From 766103203ba542b55bfb3195841ff350c4f35204 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 10 Jan 2023 10:37:42 +0100
Subject: [PATCH 166/354] delete actionlogrecord
---
doc/maintenance.rst | 20 ++++++++++++++++----
1 file changed, 16 insertions(+), 4 deletions(-)
diff --git a/doc/maintenance.rst b/doc/maintenance.rst
index 7eaf941..e3d71b4 100644
--- a/doc/maintenance.rst
+++ b/doc/maintenance.rst
@@ -135,35 +135,47 @@ File stored in local
the following update statement is to update the files while not affecting the external datasets harvested form other locations listed in table 'dataset'.
-.. code-block:: bash
+.. code-block:: sql
UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
the following update statement is to update the datasets while not affecting the external datasets harvested form other locations listed in table 'dataset'.
-.. code-block:: bash
+.. code-block:: sql
UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
in the following exemple = S3 and = 2002-green-dataversenotest1
-.. code-block:: bash
+.. code-block:: sql
UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
Example of an update for a specific owner:
-.. code-block:: bash
+.. code-block:: sql
UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.owner_id=107543 and o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
Get MD5 for the files uploaded today
-------------------------------------
+.. code-block:: sql
+
select * from dvobject as dv, datafile as df where dv.dtype='DataFile' and modificationtime>='2022-09-20' and dv.id=df.id order by df.id desc limit 10;
The MD5 corresponds to the ETag in Cloudian.
+Delete action logs older than 90 days
+-------------------------------------
+
+Old entries in the actionlogrecord table can be removed with:
+
+.. code-block:: sql
+
+DELETE FROM actionlogrecord WHERE starttime < current_timestamp - interval '90 days';
+
+
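The retention statement above can also be run non-interactively from the host. A minimal sketch, assuming the database container is simply named postgres and that the DATAVERSE_DB_* values from docker-compose.yaml are exported in the shell:

.. code-block:: bash

   # Illustrative only: container name, database name and authentication
   # are assumptions; adjust them to the values used in docker-compose.yaml.
   docker exec postgres psql -U "${DATAVERSE_DB_USER}" dataverse \
     -c "DELETE FROM actionlogrecord WHERE starttime < current_timestamp - interval '90 days';"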
From 1cf8f98249f01870517057e77dd0cec374e20ce0 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 10 Jan 2023 10:38:46 +0100
Subject: [PATCH 167/354] Update maintenance.rst
---
doc/maintenance.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/maintenance.rst b/doc/maintenance.rst
index e3d71b4..4091cc6 100644
--- a/doc/maintenance.rst
+++ b/doc/maintenance.rst
@@ -163,7 +163,7 @@ Get MD5 for the files uploaded today
.. code-block:: sql
-select * from dvobject as dv, datafile as df where dv.dtype='DataFile' and modificationtime>='2022-09-20' and dv.id=df.id order by df.id desc limit 10;
+ select * from dvobject as dv, datafile as df where dv.dtype='DataFile' and modificationtime>='2022-09-20' and dv.id=df.id order by df.id desc limit 10;
The MD5 corresponds to the ETag in Cloudian.
@@ -176,6 +176,6 @@ Delete action logs older than 90 days
.. code-block:: sql
-DELETE FROM actionlogrecord WHERE starttime < current_timestamp - interval '90 days';
+ DELETE FROM actionlogrecord WHERE starttime < current_timestamp - interval '90 days';
From 3a9b81cf38a6d8df35d4f9e4618283eb729671c9 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 11 Jan 2023 10:22:38 +0000
Subject: [PATCH 168/354] removed shoulder FK2
---
distros/dataverse.no/init.d/01-persistent-id.sh | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/01-persistent-id.sh b/distros/dataverse.no/init.d/01-persistent-id.sh
index 98581e7..fb9294f 100755
--- a/distros/dataverse.no/init.d/01-persistent-id.sh
+++ b/distros/dataverse.no/init.d/01-persistent-id.sh
@@ -10,13 +10,14 @@ curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY
curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy
curl -X PUT -d 'native/http' $SERVER/admin/settings/:UploadMethods
curl -X PUT -d solr:8983 "$SERVER/admin/settings/:SolrHostColonPort"
+curl -X PUT -d "" "$SERVER/admin/settings/:Shoulder"
echo
# Demo server with FAKE DOIs if doi_authority is empty
if [ -z "${doi_authority}" ]; then
curl -X PUT -d doi "$SERVER/admin/settings/:Protocol"
curl -X PUT -d 10.5072 "$SERVER/admin/settings/:Authority"
- curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder"
+# curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder"
curl -X PUT -d FAKE "$SERVER/admin/settings/:DoiProvider"
fi
From d53a40581b30b225d9667de3ccd40ef3d06ad7ec Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 13 Jan 2023 08:57:14 +0000
Subject: [PATCH 169/354] make data count
---
.../init.d/003-counterprocessor.sh | 21 +++++++++++++++++++
1 file changed, 21 insertions(+)
create mode 100755 distros/dataverse.no/init.d/003-counterprocessor.sh
diff --git a/distros/dataverse.no/init.d/003-counterprocessor.sh b/distros/dataverse.no/init.d/003-counterprocessor.sh
new file mode 100755
index 0000000..dcd4e33
--- /dev/null
+++ b/distros/dataverse.no/init.d/003-counterprocessor.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+
+mkdir /opt/payara/counter-processor
+cd /opt/payara/counter-processor
+wget https://github.com/CDLUC3/counter-processor/archive/v${COUNTERPROSVERSION}.tar.gz -O v${COUNTERPROSVERSION}.tar.gz
+tar xvfz v${COUNTERPROSVERSION}.tar.gz
+cd /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}
+curl "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country&license_key=${GEOIPLICENSE}&suffix=tar.gz" -o GeoLite2-Country.tar.gz \
+ && tar -xzvf GeoLite2-Country.tar.gz \
+ && mv GeoLite2-Country_*/GeoLite2-Country.mmdb /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/maxmind_geoip
+
+wget https://guides.dataverse.org/en/latest/_downloads/a65ffc2dba9f406858591558ae92790c/setup-counter-processor.sh -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/setup-counter-processor.sh
+wget https://guides.dataverse.org/en/latest/_downloads/fb16fe67897ad9fb85ec67bce5e6b83e/counter-processor-config.yaml -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/counter-processor-config.yaml
+
+curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs/makeDataCount' http://localhost:8080/api/admin/settings/:MDCLogPath
+curl -X PUT -d 'false' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics
+
+pip3 install -r requirements.txt --ignore-installed PyYAML
+export ALLOWED_ENV=year_month
+
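The script above only downloads counter-processor, its GeoLite2 database, and the two configuration files; it does not run the tool. A possible manual run for the current month, assuming counter-processor is driven by the downloaded YAML file as described in its own README (the exact Python binary may differ inside the container):

.. code-block:: bash

   # Illustrative invocation only.
   cd /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}
   CONFIG_FILE=counter-processor-config.yaml YEAR_MONTH=$(date +%Y-%m) python3 main.py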
From e9a917d3b03e0b939f16760f748d4d16f40dfb25 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Fri, 13 Jan 2023 12:58:05 +0100
Subject: [PATCH 170/354] Update installation.rst
---
doc/installation.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index ed63191..fede133 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -11,6 +11,7 @@ install Prerequisites, docker, docker-compose, and git, azure-cli
apt-get install \
ca-certificates \
curl \
+ azure-cli \
gnupg \
lsb-release
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
From d252331c721918f53c470caa178686e5d5e3dc51 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 17 Jan 2023 11:45:43 +0000
Subject: [PATCH 171/354] removed minio /updated footer/header
---
distros/dataverse.no/docker-compose.yaml | 52 -------------------
.../init.d/101-header-footer-custumisation.sh | 2 +
2 files changed, 2 insertions(+), 52 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 92ecfa2..f56fd98 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -115,51 +115,6 @@ services:
ports:
- 8095:80
- minio:
- # Please use fixed versions :D
- image: minio/minio:RELEASE.2022-03-22T02-05-10Z
- container_name: minio
- user: root
- networks:
- - traefik
- volumes:
- - ${CONFIGURATION_PATH}/secrets/minio:/run/secrets
- - ${LOCAL_STORAGE}:/data
- - /mnt:/mnt
- #- /mnt/minio-data:/data/.minio.sys/tmp
- ports:
- - "9000:9000"
- - "9017:9017"
- command:
- - server
- - /data
- - --console-address
- - ":9017"
- environment:
- - "MINIO_ROOT_USER"
- - "MINIO_ROOT_PASSWORD"
- - "MINIO_BROWSER=off"
- #- "MINIO_ACCESS_KEY_FILE=/run/secrets/minio_access_key"
- #- "MINIO_SECRET_KEY_FILE=/run/secrets/minio_secret_key"
- # Do NOT use MINIO_DOMAIN or MINIO_SERVER_URL with Traefik.
- # All Routing is done by Traefik, just tell minio where to redirect to.
- - MINIO_BROWSER_REDIRECT_URL=http://stash.localhost
- labels:
- - traefik.enable=true
- - traefik.http.routers.minio.service=minio
- - "traefik.http.routers.minio.rule=Host(`${traefikhost}`) && PathPrefix(`/storage`)"
- - "traefik.http.services.minio.loadbalancer.server.port=9016"
- #- traefik.http.routers.minio.rule=Host(`minio.${traefikhost}`)
- #- traefik.http.services.minio.loadbalancer.server.port=9000
- - traefik.http.routers.minio-console.service=minio-console
- #- traefik.http.routers.minio-console.rule=Host(`minio-stash.${traefikhost}`)
- - "traefik.http.routers.minio-console.rule=Host(`${traefikhost}`) && PathPrefix(`/console`)"
- - "traefik.http.services.minio-console.loadbalancer.server.port=9017"
- - "traefik.http.routers.minio.tls=true"
- - "traefik.http.routers.minio.tls.certresolver=myresolver"
- - "traefik.http.routers.minio-console.tls=true"
- - "traefik.http.routers.minio-console.tls.certresolver=myresolver"
-
whoami:
networks:
- traefik
@@ -214,14 +169,7 @@ services:
- "socket_port"
- "federated_json_file"
- "bucketname_1"
- - "minio_label_1"
- - "minio_bucket_1"
- - "minio_custom_endpoint"
- "bucketname_2"
- - "minio_profile_1"
- - "minio_label_2"
- - "minio_bucket_2"
- - "minio_profile_2"
- "DATAVERSE_DB_HOST"
- "DATAVERSE_DB_USER"
- "DATAVERSE_DB_PASSWORD"
diff --git a/distros/dataverse.no/init.d/101-header-footer-custumisation.sh b/distros/dataverse.no/init.d/101-header-footer-custumisation.sh
index 19adb21..1dd49db 100755
--- a/distros/dataverse.no/init.d/101-header-footer-custumisation.sh
+++ b/distros/dataverse.no/init.d/101-header-footer-custumisation.sh
@@ -7,3 +7,5 @@ curl -X PUT -d '/logos/navbar/logo.png' http://localhost:8080/api/admin/settings
curl -X PUT -d '/tmp/custom-header.html' http://localhost:8080/api/admin/settings/:HeaderCustomizationFile
curl -X PUT -d '/tmp/custom-footer.html' http://localhost:8080/api/admin/settings/:FooterCustomizationFile
curl -X PUT -d http://site.uit.no/dataverseno/deposit/ http://localhost:8080/api/admin/settings/:GuidesBaseUrl
+curl -X PUT -d '' http://localhost:8080/api/admin/settings/:GuidesVersion
+curl -X PUT -d https://site.uit.no/dataverseno/support/ http://localhost:8080/api/admin/settings/:NavbarSupportUrl
From 41beb58efc8f8c71980a1f88fb4d260a1e2f3c60 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Mon, 23 Jan 2023 09:58:23 +0000
Subject: [PATCH 172/354] Cleaned patch file for Bundle.properties
---
.../modification/Bundle.properties.patch | 36 -------------------
1 file changed, 36 deletions(-)
diff --git a/distros/dataverse.no/modification/Bundle.properties.patch b/distros/dataverse.no/modification/Bundle.properties.patch
index 1a04e63..7dc1883 100644
--- a/distros/dataverse.no/modification/Bundle.properties.patch
+++ b/distros/dataverse.no/modification/Bundle.properties.patch
@@ -1,14 +1,5 @@
--- Bundle.properties 2021-08-04 19:13:08.000000000 +0000
+++ /root/git/dataverse-docker/distros/dataverse.no/modification/Bundle.properties 2022-07-12 10:41:34.201813777 +0000
-@@ -188,7 +188,7 @@
- # TODO: Confirm that "toReview" can be deleted.
- toReview=Don't forget to publish it or send it back to the contributor!
- # Bundle file editors, please note that "notification.welcome" is used in a unit test.
--notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the {1}. Want to test out Dataverse features? Use our {2}. Also, check for your welcome email to verify your address.
-+notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check our user guides at http://site.uit.no/dataverseno/deposit/ or contact DataverseNO support for assistance: http://site.uit.no/dataverseno/support/.
- notification.demoSite=Demo Site
- notification.requestFileAccess=File access requested for dataset: {0} was made by {1} ({2}).
- notification.grantFileAccess=Access granted for files in dataset: {0}.
@@ -303,8 +303,8 @@
login.forgot.text=Forgot your password?
login.builtin=Dataverse Account
@@ -20,30 +11,3 @@
login.builtin.credential.usernameOrEmail=Username/Email
login.builtin.credential.password=Password
login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account?
-@@ -704,7 +704,7 @@
- notification.email.wasReturnedByReviewer={0} (view at {1}) was returned by the curator of {2} (view at {3}).
- notification.email.wasPublished={0} (view at {1}) was published in {2} (view at {3}).
- notification.email.publishFailedPidReg={0} (view at {1}) in {2} (view at {3}) could not be published due to a failure to register, or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen.
--notification.email.closing=\n\nYou may contact us for support at {0}.\n\nThank you,\n{1}
-+notification.email.closing=\n\nYou may contact us for support at https://site.uit.no/dataverseno/support/ .\n\nThank you,\n{1}
- notification.email.closing.html=
You may contact us for support at {0}.
Thank you, {1}
- notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}).
- notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}).
-@@ -876,7 +876,7 @@
- dataverse.results.btn.addData.newDataset=New Dataset
- dataverse.results.dialog.addDataGuest.header=Add Data
- dataverse.results.dialog.addDataGuest.msg=Log in to create a dataverse or add a dataset.
--dataverse.results.dialog.addDataGuest.msg.signup=Sign up or log in to create a dataverse or add a dataset.
-+dataverse.results.dialog.addDataGuest.msg.signup=You need to Log In to add a dataset.
- dataverse.results.dialog.addDataGuest.signup.title=Sign Up for a Dataverse Account
- dataverse.results.dialog.addDataGuest.login.title=Log into your Dataverse Account
- dataverse.results.types.dataverses=Dataverses
-@@ -1657,7 +1657,7 @@
- file.more.information.link=Link to more file information for
- file.requestAccess=Request Access
- file.requestAccess.dialog.msg=You need to Log In to request access.
--file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
-+file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access.
- file.accessRequested=Access Requested
- file.ingestInProgress=Ingest in progress...
- file.dataFilesTab.metadata.header=Metadata
From d0b099bf2062a3f98adddecf9f475fe75a07c866 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Mon, 23 Jan 2023 10:24:43 +0000
Subject: [PATCH 173/354] updated navbar url
---
distros/dataverse.no/init.d/01-persistent-id.sh | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/01-persistent-id.sh b/distros/dataverse.no/init.d/01-persistent-id.sh
index fb9294f..d1518b1 100755
--- a/distros/dataverse.no/init.d/01-persistent-id.sh
+++ b/distros/dataverse.no/init.d/01-persistent-id.sh
@@ -3,7 +3,9 @@ echo "Setting up the settings" >> /tmp/status.log
echo "- Allow internal signup" >> /tmp/status.log
SERVER=http://${DATAVERSE_URL}/api
echo $SERVER
-curl -X PUT -d yes "$SERVER/admin/settings/:AllowSignUp"
+curl -X PUT -d https://site.uit.no/dataverseno/support/ "$SERVER/admin/settings/:NavbarSupportUrl"
+curl -X PUT -d http://site.uit.no/dataverseno/deposit/ "$SERVER/admin/settings/:NavbarGuidesUrl"
+curl -X PUT -d 'false' "$SERVER/admin/settings/:AllowSignUp"
curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:SignUpUrl"
curl -X PUT -d CV "$SERVER/admin/settings/:CV"
curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY
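Each setting written by this script can be read back through the same endpoint, which makes for a quick sanity check once the container is up:

.. code-block:: bash

   # GET on a setting should return the value stored by the PUT calls above.
   curl "$SERVER/admin/settings/:AllowSignUp"
   curl "$SERVER/admin/settings/:NavbarSupportUrl"
   curl "$SERVER/admin/settings/:NavbarGuidesUrl"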
From 86ff8fb1ac130ba925f00302e0e95b24da07655c Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 2 Feb 2023 11:47:11 +0000
Subject: [PATCH 174/354] orcid login
---
distros/dataverse.no/docker-compose.yaml | 1 +
distros/dataverse.no/init.d/082-orcid.sh | 8 ++++++++
2 files changed, 9 insertions(+)
create mode 100644 distros/dataverse.no/init.d/082-orcid.sh
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index f56fd98..b69fea2 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -160,6 +160,7 @@ services:
- "aws_uit_bucket_name"
- "aws_uit_s3_profile"
- "azure_json_file"
+ - "orcid_json_file"
- "system_email"
- "mailhost"
- "mailuser"
diff --git a/distros/dataverse.no/init.d/082-orcid.sh b/distros/dataverse.no/init.d/082-orcid.sh
new file mode 100644
index 0000000..6d97fa1
--- /dev/null
+++ b/distros/dataverse.no/init.d/082-orcid.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+
+# Federated login activation
+# https://guides.dataverse.org/en/latest/installation/shibboleth.html
+if [ "${orcid_json_file}" ]; then
+ curl -X POST -H 'Content-type: application/json' --upload-file ${orcid_json_file} http://localhost:8080/api/admin/authenticationProviders
+fi
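The script expects ${orcid_json_file} to point at an OAuth2 provider definition. A minimal sketch of such a file, following the general shape of Dataverse OAuth2 provider JSON; the path, id, title and in particular the clientId/clientSecret values are placeholders that must be replaced with your own ORCID API credentials:

.. code-block:: bash

   # Hypothetical example of the file referenced by ${orcid_json_file}.
   cat > /secrets/orcid/orcid.json <<'EOF'
   {
     "id": "orcid",
     "factoryAlias": "oauth2",
     "title": "ORCID",
     "subtitle": "",
     "factoryData": "type: orcid | userEndpoint: https://pub.orcid.org/v2.1/{ORCID}/person | clientId: FIXME | clientSecret: FIXME",
     "enabled": true
   }
   EOF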
From 484729db151cdf15cd4088ea796307706101af7c Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 3 Feb 2023 11:10:08 +0000
Subject: [PATCH 175/354] updated shibboleth apache
---
distros/dataverse.no/docker-compose.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index b69fea2..197bd1e 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -57,8 +57,8 @@ services:
shibboleth:
networks:
- traefik
- #image: shibboleth:3.3.0
- image: ${DOCKER_HUB}/shibboleth:3.3.0
+ image: test02/shibboleth:latest
+ #image: ${DOCKER_HUB}/shibboleth:3.3.0
container_name: shibboleth
privileged: true
ports:
From 04f6d6632b3294d2fbd5a8319b0192c5ffc8afb2 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 3 Feb 2023 11:14:00 +0000
Subject: [PATCH 176/354] fixed shibb container
---
distros/dataverse.no/docker-compose.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 197bd1e..c15bf80 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -57,8 +57,8 @@ services:
shibboleth:
networks:
- traefik
- image: test02/shibboleth:latest
- #image: ${DOCKER_HUB}/shibboleth:3.3.0
+ #image: test02/shibboleth:latest
+ image: ${DOCKER_HUB}/shibboleth:3.3.0.A2.37
container_name: shibboleth
privileged: true
ports:
From 05ecaeff18a122ba5aa857c2eeec8af1046b678b Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 7 Feb 2023 11:14:48 +0100
Subject: [PATCH 177/354] added -y
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index fede133..b62578c 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -8,7 +8,7 @@ install Prerequisites, docker, docker-compose, and git, azure-cli
sudo su
apt-get update
- apt-get install \
+ apt-get install -y \
ca-certificates \
curl \
azure-cli \
From ea01bb46fc2dfa01f36f2ad544199bfd590b4484 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 7 Feb 2023 11:19:06 +0100
Subject: [PATCH 178/354] DOCKER HUB
---
doc/prerequisitsResourses.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/doc/prerequisitsResourses.rst b/doc/prerequisitsResourses.rst
index afdfd9b..def6359 100644
--- a/doc/prerequisitsResourses.rst
+++ b/doc/prerequisitsResourses.rst
@@ -14,6 +14,8 @@ env ``/resourse/private/.env``
Docroot ``/resourse/docroot``
+GET ACCESS TO THE DOCKER HUB!
+
.. code-block:: bash
From 78c1bc1df94cc72f343bfc1d4ec1d02559ce09bc Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 7 Feb 2023 12:49:03 +0100
Subject: [PATCH 179/354] Update installation.rst
---
doc/installation.rst | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index b62578c..9f4b363 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -1,8 +1,11 @@
Dataverse installation on Microsoft Azure
=========================================
-Prerequisites : have sudo rights
-install Prerequisites, docker, docker-compose, and git, azure-cli
+
+Prerequisites : SSH to the VM as the administrator and make sure that you have sudo rights to install docker, docker-compose, and git, azure-cli
+
+installation of docker, docker-compose, and git, azure-cli
+----------------------------------------------------------
.. code-block:: bash
From 215a8fa8e319f159f0a40954b918b101a9847d8d Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 7 Feb 2023 12:58:34 +0100
Subject: [PATCH 180/354] Update installation.rst
---
doc/installation.rst | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 9f4b363..55b28c2 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -2,9 +2,10 @@ Dataverse installation on Microsoft Azure
=========================================
-Prerequisites : SSH to the VM as the administrator and make sure that you have sudo rights to install docker, docker-compose, and git, azure-cli
+Prerequisites: SSH to the VM as the administrator and make sure that you have sudo rights to install [Docker](https://www.docker.com/)
+, docker-compose, and git, azure-cli
-installation of docker, docker-compose, and git, azure-cli
+Installation of docker, docker-compose, and git, azure-cli
----------------------------------------------------------
.. code-block:: bash
From 1a5d7c89f642fe3c762daa6473929587779d19c9 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 7 Feb 2023 13:06:50 +0100
Subject: [PATCH 181/354] Update installation.rst
---
doc/installation.rst | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 55b28c2..34c607c 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -2,8 +2,10 @@ Dataverse installation on Microsoft Azure
=========================================
-Prerequisites: SSH to the VM as the administrator and make sure that you have sudo rights to install [Docker](https://www.docker.com/)
-, docker-compose, and git, azure-cli
+Prerequisites: SSH to the VM as the administrator and make sure that you have sudo rights to install the following:
+- [Docker and Docker-compose](https://www.docker.com/)
+- [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
+- [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli)
Installation of docker, docker-compose, and git, azure-cli
----------------------------------------------------------
From a4d9ddf9f26216a6c5316afdae1e759c9876b23d Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 7 Feb 2023 13:13:02 +0100
Subject: [PATCH 182/354] Update installation.rst
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 34c607c..401ee41 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -4,8 +4,8 @@ Dataverse installation on Microsoft Azure
Prerequisites: SSH to the VM as the administrator and make sure that you have sudo rights to install the following:
- [Docker and Docker-compose](https://www.docker.com/)
-- [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
-- [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli)
+* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
++ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli)
Installation of docker, docker-compose, and git, azure-cli
----------------------------------------------------------
From 80514e92fcace165407fd2801fcd0e7b6cd7b0cb Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 7 Feb 2023 13:14:24 +0100
Subject: [PATCH 183/354] Update installation.rst
---
doc/installation.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index 401ee41..bb08c99 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -3,6 +3,7 @@ Dataverse installation on Microsoft Azure
Prerequisites: SSH to the VM as the administrator and make sure that you have sudo rights to install the following:
+
- [Docker and Docker-compose](https://www.docker.com/)
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
+ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli)
From 5bb1702c112faed3bf719550166a50aedc417a32 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 7 Feb 2023 13:20:34 +0100
Subject: [PATCH 184/354] Update installation.rst
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index bb08c99..2c1b68e 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -2,7 +2,7 @@ Dataverse installation on Microsoft Azure
=========================================
-Prerequisites: SSH to the VM as the administrator and make sure that you have sudo rights to install the following:
+Prerequisites: SSH to the working VM as the administrator and make sure that you have sudo rights to install the following:
- [Docker and Docker-compose](https://www.docker.com/)
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
From ea38b6cc01e18b1745bfb67605512dc05d8ecd8e Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 7 Feb 2023 14:15:01 +0100
Subject: [PATCH 185/354] Update installation.rst
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 2c1b68e..0d54f5e 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -36,7 +36,7 @@ Installation of docker, docker-compose, and git, azure-cli
Dataverse root folder
---------------------
-defined in ``CONFIGURATION_PATH`` and ``DOCROOT`` default : ``/distrib/``
+Create a folder for secrets and define it in ``CONFIGURATION_PATH`` and ``DOCROOT`` (default: ``/distrib/``)
.. code-block:: bash
From 83f107546d2cf96e9259074d0cce9b054776a993 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 7 Feb 2023 14:53:57 +0100
Subject: [PATCH 186/354] added curl
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 0d54f5e..770572c 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -141,7 +141,7 @@ certificates should be put in ``$CONFIGURATION_PATH/configuration/files`` there
The name of the certificates files should match the name in ``$CONFIGURATION_PATH/configuration/files/certificates.toml``
-Check the certificates with ``curl -placeholder hostname``
+Check the certificates with ``curl --insecure -vvI https://0.0.0.0:443 2>&1 | awk 'BEGIN { cert=0 } /^\* SSL connection/ { cert=1 } /^\*/ { if (cert) print }'``
DOCROOT
From 7e6ad2d71ccd5ae7a900f8e20f7ac05117060407 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Thu, 9 Feb 2023 09:48:04 +0100
Subject: [PATCH 187/354] Update installation.rst
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 770572c..28418a3 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -8,7 +8,7 @@ Prerequisites: SSH to the working VM as the administrator and make sure that you
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
+ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli)
-Installation of docker, docker-compose, and git, azure-cli
+Installation of docker, docker-compose, git amd, azure-cli
----------------------------------------------------------
.. code-block:: bash
From 6514e6218aa29174d0859941de0b32ba808857b4 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Thu, 9 Feb 2023 09:48:30 +0100
Subject: [PATCH 188/354] Update installation.rst
---
doc/installation.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 28418a3..0b51d98 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -8,7 +8,7 @@ Prerequisites: SSH to the working VM as the administrator and make sure that you
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
+ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli)
-Installation of docker, docker-compose, git amd, azure-cli
+Installation of docker, docker-compose, git, and azure-cli
----------------------------------------------------------
.. code-block:: bash
From 50716100d9c10644120926ccbabba01bba3b9dc9 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Thu, 9 Feb 2023 10:02:45 +0100
Subject: [PATCH 189/354] Update installation.rst
---
doc/installation.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index 0b51d98..5880a25 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -52,6 +52,7 @@ Create a folder for secrets and define it in ``CONFIGURATION_PATH`` and ``DOCROO
Clone the git
-------------
+It is assumed here that you have created a project and a git repository. See [GitHub](https://docs.github.com) on how to create a new project/repo or repo from an existing project.
.. code-block:: bash
git clone https://github.com/DataverseNO/dataverse-docker.git
From 86de54817f7a8b82ed65a9ab569438fb6cff39a3 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Thu, 9 Feb 2023 10:04:10 +0100
Subject: [PATCH 190/354] Update installation.rst
---
doc/installation.rst | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 5880a25..ce28857 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -52,7 +52,8 @@ Create a folder for secrets and define it in ``CONFIGURATION_PATH`` and ``DOCROO
Clone the git
-------------
-It is assumed here that you have created a project and a git repository. See [GitHub](https://docs.github.com) on how to create a new project/repo or repo from an existing project.
+It is assumed here that you have already created a project and a git repository. See [GitHub](https://docs.github.com) on how to create a new project/repo or repo from an existing project.
+
.. code-block:: bash
git clone https://github.com/DataverseNO/dataverse-docker.git
From a1e4251ef29e91991f5cf267de2d86af8c7d24bc Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Thu, 9 Feb 2023 10:17:26 +0100
Subject: [PATCH 191/354] Update installation.rst
---
doc/installation.rst | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index ce28857..9fd9ac8 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -65,7 +65,11 @@ It is assumed here that you have already created a project and a git repository.
az acr login --name presacrd4oilmd5ss77y
docker network create traefik
-if using pre-made resourses archive
+Environment variables
+---------------------
+If you are using docker-compose, you can skip setting the environment variables manually, as they will be set in the docker-compose.yml file or a .env file.
+
+We have pre-configured environment variables (.env) stored in our resource archive.
.. code-block:: bash
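If the archive is not available, a hand-written .env works as well; a small illustrative fragment (the variable names are the ones referenced by docker-compose.yaml, every value is a placeholder):

.. code-block:: bash

   # Illustrative .env fragment only; replace every value with your own.
   CONFIGURATION_PATH=/distrib/private
   DOCROOT=/distrib
   LOCAL_STORAGE=/mnt/dataverse-files
   traefikhost=dataverse.example.org
   DATAVERSE_DB_HOST=postgres
   DATAVERSE_DB_USER=dataverse
   DATAVERSE_DB_PASSWORD=changeme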
From 82e9190a0171e06da847d72fd7d80b581c44e4ed Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Thu, 9 Feb 2023 11:14:01 +0100
Subject: [PATCH 192/354] Update installation.rst
---
doc/installation.rst | 17 +++++++++++++++++
1 file changed, 17 insertions(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index 9fd9ac8..3b0c291 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -1,6 +1,23 @@
Dataverse installation on Microsoft Azure
=========================================
+Dependencies
+------------
+
+- SMTP server
+
+Login
+
+- Feide SAML / OpenID
+- Azure OpenID
+- ORCID OpenID
+
+Storage
+
+- Blob storage (mounted on the VM)
+- S3 storage
+
+
Prerequisites: SSH to the working VM as the administrator and make sure that you have sudo rights to install the following:
From 4ae8e0051beefdc56e4ae6f9144d843fa4ef2069 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Thu, 9 Feb 2023 13:03:42 +0100
Subject: [PATCH 193/354] config domain name
---
doc/installation.rst | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 3b0c291..15a2c97 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -179,9 +179,11 @@ Apache and shibboleth configuration
-----------------------------------
Apache configuration
-Change domain name in
+Change the domain name in ``shibboleth/shibboleth2.xml``
-Set up shibboleth ``shibboleth/shibboleth2.xml``
+Change the domain name twice in ``distros/dataverse.no/configs/http-ssl.conf``
+
+Change the domain name twice in ``./distros/dataverse.no/configs/domain.xml``
Copy the keygen.sh command
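All three renames can be scripted in one pass; a rough sketch, assuming OLDDOMAIN is whatever placeholder host name currently appears in those files:

.. code-block:: bash

   # Illustrative only: both domain values are placeholders.
   OLDDOMAIN=old.example.org
   NEWDOMAIN=dataverse.example.org
   sed -i "s/${OLDDOMAIN}/${NEWDOMAIN}/g" \
       shibboleth/shibboleth2.xml \
       distros/dataverse.no/configs/http-ssl.conf \
       distros/dataverse.no/configs/domain.xml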
From 4e5b5b02c5fb366807f5d20fd9db1282d570ec02 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 9 Feb 2023 13:17:36 +0000
Subject: [PATCH 194/354] added shibboleth config
---
shibboleth/apache.config | 56 ++++
shibboleth/apache24.config | 53 ++++
shibboleth/attrChecker.html | 57 ++++
shibboleth/attribute-map.xml | 31 +++
shibboleth/attribute-policy.xml | 81 ++++++
shibboleth/bindingTemplate.html | 58 ++++
shibboleth/console.logger | 33 +++
shibboleth/discoveryTemplate.html | 48 ++++
shibboleth/globalLogout.html | 29 ++
shibboleth/keygen.sh | 91 +++++++
shibboleth/localLogout.html | 27 ++
shibboleth/metadataError.html | 35 +++
shibboleth/metagen.sh | 439 ++++++++++++++++++++++++++++++
shibboleth/native.logger | 30 ++
shibboleth/partialLogout.html | 24 ++
shibboleth/postTemplate.html | 37 +++
shibboleth/protocols.xml | 57 ++++
shibboleth/seckeygen.sh | 56 ++++
shibboleth/security-policy.xml | 44 +++
shibboleth/sessionError.html | 45 +++
shibboleth/shibboleth2.xml | 130 +++++++++
shibboleth/shibd-amazon | 133 +++++++++
shibboleth/shibd-debian | 168 ++++++++++++
shibboleth/shibd-osx.plist | 23 ++
shibboleth/shibd-redhat | 133 +++++++++
shibboleth/shibd-suse | 130 +++++++++
shibboleth/shibd-systemd | 23 ++
shibboleth/shibd.logger | 73 +++++
shibboleth/sslError.html | 33 +++
29 files changed, 2177 insertions(+)
create mode 100644 shibboleth/apache.config
create mode 100644 shibboleth/apache24.config
create mode 100644 shibboleth/attrChecker.html
create mode 100755 shibboleth/attribute-map.xml
create mode 100755 shibboleth/attribute-policy.xml
create mode 100644 shibboleth/bindingTemplate.html
create mode 100644 shibboleth/console.logger
create mode 100644 shibboleth/discoveryTemplate.html
create mode 100644 shibboleth/globalLogout.html
create mode 100755 shibboleth/keygen.sh
create mode 100644 shibboleth/localLogout.html
create mode 100644 shibboleth/metadataError.html
create mode 100755 shibboleth/metagen.sh
create mode 100644 shibboleth/native.logger
create mode 100644 shibboleth/partialLogout.html
create mode 100644 shibboleth/postTemplate.html
create mode 100644 shibboleth/protocols.xml
create mode 100755 shibboleth/seckeygen.sh
create mode 100644 shibboleth/security-policy.xml
create mode 100644 shibboleth/sessionError.html
create mode 100755 shibboleth/shibboleth2.xml
create mode 100644 shibboleth/shibd-amazon
create mode 100644 shibboleth/shibd-debian
create mode 100644 shibboleth/shibd-osx.plist
create mode 100755 shibboleth/shibd-redhat
create mode 100644 shibboleth/shibd-suse
create mode 100644 shibboleth/shibd-systemd
create mode 100644 shibboleth/shibd.logger
create mode 100644 shibboleth/sslError.html
diff --git a/shibboleth/apache.config b/shibboleth/apache.config
new file mode 100644
index 0000000..d692f38
--- /dev/null
+++ b/shibboleth/apache.config
@@ -0,0 +1,56 @@
+# https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPApacheConfig
+
+# RPM installations on platforms with a conf.d directory will
+# result in this file being copied into that directory for you
+# and preserved across upgrades.
+
+# For non-RPM installs, you should copy the relevant contents of
+# this file to a configuration location you control.
+
+#
+# Load the Shibboleth module.
+#
+LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so
+
+#
+# An Apache handler needs to be established for the "handler" location.
+# This applies the handler to any requests for a resource with a ".sso"
+# extension.
+#
+
+ SetHandler shib-handler
+
+
+#
+# Ensures handler will be accessible.
+#
+
+ Satisfy Any
+ Allow from all
+
+
+#
+# Used for example style sheet in error templates.
+#
+
+ Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css
+
+ Satisfy Any
+ Allow from all
+
+
+
+#
+# Configure the module for content.
+#
+# You MUST enable AuthType shibboleth for the module to process
+# any requests, and there MUST be a require command as well. To
+# enable Shibboleth but not specify any session/access requirements
+# use "require shibboleth".
+#
+
+ AuthType shibboleth
+ ShibCompatWith24 On
+ ShibRequestSetting requireSession 1
+ require shib-session
+
diff --git a/shibboleth/apache24.config b/shibboleth/apache24.config
new file mode 100644
index 0000000..3a0a7b2
--- /dev/null
+++ b/shibboleth/apache24.config
@@ -0,0 +1,53 @@
+# https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPApacheConfig
+
+# RPM installations on platforms with a conf.d directory will
+# result in this file being copied into that directory for you
+# and preserved across upgrades.
+
+# For non-RPM installs, you should copy the relevant contents of
+# this file to a configuration location you control.
+
+#
+# Load the Shibboleth module.
+#
+LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so
+
+#
+# Turn this on to support "require valid-user" rules from other
+# mod_authn_* modules, and use "require shib-session" for anonymous
+# session-based authorization in mod_shib.
+#
+ShibCompatValidUser Off
+
+#
+# Ensures handler will be accessible.
+#
+
+ AuthType None
+ Require all granted
+
+
+#
+# Used for example style sheet in error templates.
+#
+
+
+ AuthType None
+ Require all granted
+
+ Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css
+
+
+#
+# Configure the module for content.
+#
+# You MUST enable AuthType shibboleth for the module to process
+# any requests, and there MUST be a require command as well. To
+# enable Shibboleth but not specify any session/access requirements
+# use "require shibboleth".
+#
+
+ AuthType shibboleth
+ ShibRequestSetting requireSession 1
+ require shib-session
+
diff --git a/shibboleth/attrChecker.html b/shibboleth/attrChecker.html
new file mode 100644
index 0000000..a3ddf6e
--- /dev/null
+++ b/shibboleth/attrChecker.html
@@ -0,0 +1,57 @@
+
+
+
+
+
+
+
+ Insufficient Information
+
+
+
+
+
+
+
+
We're sorry, but you cannot access this service at this time.
+
+
+
This service requires information about you that your identity provider
+()
+did not release. To gain access to this service, your identity provider
+must release the required information.
If the message above indicates success, you have been logged out of all
+the applications and systems that support the logout mechanism.
+
+
Regardless of the outcome, it is strongly advised that you close your browser
+to ensure that you complete the logout process.
+
+
+
diff --git a/shibboleth/keygen.sh b/shibboleth/keygen.sh
new file mode 100755
index 0000000..b5378fd
--- /dev/null
+++ b/shibboleth/keygen.sh
@@ -0,0 +1,91 @@
+#! /bin/sh
+
+while getopts n:h:u:g:o:e:y:bf c
+ do
+ case $c in
+ u) USER=$OPTARG;;
+ g) GROUP=$OPTARG;;
+ o) OUT=$OPTARG;;
+ b) BATCH=1;;
+ f) FORCE=1;;
+ h) FQDN=$OPTARG;;
+ e) ENTITYID=$OPTARG;;
+ y) YEARS=$OPTARG;;
+ n) PREFIX=$OPTARG;;
+ \?) echo "keygen [-o output directory (default .)] [-u username to own keypair] [-g owning groupname] [-h hostname for cert] [-y years to issue cert] [-e entityID to embed in cert] [-n filename prefix (default 'sp')]"
+ exit 1;;
+ esac
+ done
+
+if [ -z "$OUT" ] ; then
+ OUT=.
+fi
+
+if [ -z "$PREFIX" ]; then
+ PREFIX="sp"
+fi
+
+if [ -n "$FORCE" ] ; then
+ rm $OUT/${PREFIX}-key.pem $OUT/${PREFIX}-cert.pem
+fi
+
+if [ -s $OUT/${PREFIX}-key.pem -o -s $OUT/${PREFIX}-cert.pem ] ; then
+ if [ -z "$BATCH" ] ; then
+ echo The files $OUT/${PREFIX}-key.pem and/or $OUT/${PREFIX}-cert.pem already exist!
+ echo Use -f option to force recreation of keypair.
+ exit 2
+ fi
+ exit 0
+fi
+
+if [ -z "$FQDN" ] ; then
+ FQDN=`hostname`
+fi
+
+if [ -z "$YEARS" ] ; then
+ YEARS=10
+fi
+
+DAYS=`expr $YEARS \* 365`
+
+if [ -z "$ENTITYID" ] ; then
+ ALTNAME=DNS:$FQDN
+else
+ ALTNAME=DNS:$FQDN,URI:$ENTITYID
+fi
+
+SSLCNF=$OUT/${PREFIX}-cert.cnf
+cat >$SSLCNF < /dev/null
+fi
+rm $SSLCNF
+
+if [ -s $OUT/${PREFIX}-key.pem -a -n "$USER" ] ; then
+ chown $USER $OUT/${PREFIX}-key.pem $OUT/${PREFIX}-cert.pem
+fi
+
+if [ -s $OUT/${PREFIX}-key.pem -a -n "$GROUP" ] ; then
+ chgrp $GROUP $OUT/${PREFIX}-key.pem $OUT/${PREFIX}-cert.pem
+fi
diff --git a/shibboleth/localLogout.html b/shibboleth/localLogout.html
new file mode 100644
index 0000000..75bd3e1
--- /dev/null
+++ b/shibboleth/localLogout.html
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+
+ Local Logout
+
+
+
+
+
+
+
+
Local Logout
+
+Status of Local Logout:
+
+
+
+You MUST close your browser to complete the logout process.
+
+
+
diff --git a/shibboleth/metadataError.html b/shibboleth/metadataError.html
new file mode 100644
index 0000000..e0e6a1b
--- /dev/null
+++ b/shibboleth/metadataError.html
@@ -0,0 +1,35 @@
+
+
+
+
+
+
+
+ Unknown Identity Provider
+
+
+
+
+
+
+
+
Unknown or Unusable Identity Provider
+
+
The identity provider supplying your login credentials is not authorized
+for use with this service or does not support the necessary capabilities.
+
+
To report this problem, please contact the site administrator at
+.
+
+
+
Please include the following error message in any email:
+
Identity provider lookup failed at ()
+
+
EntityID:
+
+
:
+
+
+
diff --git a/shibboleth/metagen.sh b/shibboleth/metagen.sh
new file mode 100755
index 0000000..f39d53b
--- /dev/null
+++ b/shibboleth/metagen.sh
@@ -0,0 +1,439 @@
+#!/usr/bin/bash
+
+DECLS=1
+
+TYPE="SHIB"
+
+SAML1=0
+SAML2=0
+ARTIFACT=0
+DS=0
+MDUI=0
+LOGOUT=0
+NAMEIDMGMT=0
+
+SAML10PROT="urn:oasis:names:tc:SAML:1.0:protocol"
+SAML11PROT="urn:oasis:names:tc:SAML:1.1:protocol"
+SAML20PROT="urn:oasis:names:tc:SAML:2.0:protocol"
+
+SAML20SOAP="urn:oasis:names:tc:SAML:2.0:bindings:SOAP"
+SAML20REDIRECT="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
+SAML20POST="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
+SAML20POSTSS="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST-SimpleSign"
+SAML20ART="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact"
+SAML20PAOS="urn:oasis:names:tc:SAML:2.0:bindings:PAOS"
+
+SAML1POST="urn:oasis:names:tc:SAML:1.0:profiles:browser-post"
+SAML1ART="urn:oasis:names:tc:SAML:1.0:profiles:artifact-01"
+
+while getopts a:c:e:f:h:l:n:o:s:t:u:y:d:T:12ADLNOU c
+ do
+ case $c in
+ c) CERTS[${#CERTS[*]}]=$OPTARG;;
+ e) ENTITYID=$OPTARG;;
+ f) FORMATS[${#FORMATS[*]}]=$OPTARG;;
+ h) HOSTS[${#HOSTS[*]}]=$OPTARG;;
+ l) HOSTLIST=$OPTARG;;
+ n) NAKEDHOSTS[${#NAKEDHOSTS[*]}]=$OPTARG;;
+ o) ORGNAME=$OPTARG;;
+ a) ADMIN[${#ADMIN[*]}]=$OPTARG;;
+ s) SUP[${#SUP[*]}]=$OPTARG;;
+ t) TECH[${#TECH[*]}]=$OPTARG;;
+ u) URL=$OPTARG;;
+ y) DISPLAYNAME=$OPTARG;;
+ d) DESC=$OPTARG;;
+ 1) SAML1=1;;
+ 2) SAML2=1;;
+ A) ARTIFACT=1;;
+ D) DS=1;;
+ L) LOGOUT=1;;
+ N) NAMEIDMGMT=1;;
+ O) DECLS=0;;
+ T) TYPE=$OPTARG;;
+ U) MDUI=1;;
+ \?) echo metagen [-12ADLNOU] -c cert1 [-c cert2 ...] -h host1 [-h host2 ...] [-e entityID]
+ exit 1;;
+ esac
+ done
+
+if [ ! -z $HOSTLIST ] ; then
+ if [ -s $HOSTLIST ] ; then
+ while read h
+ do
+ HOSTS[${#HOSTS[@]}]=$h
+ done <$HOSTLIST
+ else
+ echo File with list of hostnames $l does not exist!
+ exit 2
+ fi
+fi
+
+if [ ${#HOSTS[*]} -eq 0 -a ${#NAKEDHOSTS[*]} -eq 0 ] ; then
+ echo metagen [-12ADLN] -c cert1 [-c cert2 ...] -h host1 [-h host2 ...] [-e entityID]
+ exit 1
+fi
+
+if [ ${#CERTS[*]} -eq 0 ] ; then
+ CERTS[${#CERTS[*]}]=sp-cert.pem
+fi
+
+for c in ${CERTS[@]}
+do
+ if [ ! -s $c ] ; then
+ echo Certificate file $c does not exist!
+ exit 2
+ fi
+done
+
+if [ $TYPE == "SHIB" ] ; then
+ EIDSUFFIX=shibboleth
+elif [ $TYPE == "SSP" ] ; then
+ EIDSUFFIX=simplesaml
+else
+ echo "Unknown type: $TYPE \(SHIB and SSP are supported\)"
+ exit 3
+fi
+
+
+if [ -z $ENTITYID ] ; then
+ if [ ${#HOSTS[*]} -eq 0 ] ; then
+ ENTITYID=https://${NAKEDHOSTS[0]}/$EIDSUFFIX
+ else
+ ENTITYID=https://${HOSTS[0]}/$EIDSUFFIX
+ fi
+fi
+
+# Establish protocols and bindings.
+
+if [ $SAML1 -eq 0 -a $SAML2 -eq 0 ] ; then
+ SAML1=1
+ SAML2=1
+fi
+
+if [ $LOGOUT -eq 1 ] ; then
+ SAML2=1
+ if [ $TYPE == "SHIB" ] ; then
+ SLO[${#SLO[*]}]=$SAML20SOAP
+ SLO[${#SLO[*]}]=$SAML20REDIRECT
+ SLO[${#SLO[*]}]=$SAML20POST
+ SLOLOC[${#SLOLOC[*]}]="Shibboleth.sso/SLO/SOAP"
+ SLOLOC[${#SLOLOC[*]}]="Shibboleth.sso/SLO/Redirect"
+ SLOLOC[${#SLOLOC[*]}]="Shibboleth.sso/SLO/POST"
+ elif [ $TYPE == "SSP" ] ; then
+ SLO[${#SLO[*]}]=$SAML20SOAP
+ SLO[${#SLO[*]}]=$SAML20REDIRECT
+ SLOLOC[${#SLOLOC[*]}]="simplesaml/module.php/saml/sp/saml2-logout.php/default-sp"
+ SLOLOC[${#SLOLOC[*]}]="simplesaml/module.php/saml/sp/saml2-logout.php/default-sp"
+ fi
+ if [ $ARTIFACT -eq 1 -a $TYPE == "SHIB" ] ; then
+ SLO[${#SLO[*]}]=$SAML20ART
+ SLOLOC[${#SLOLOC[*]}]="Shibboleth.sso/SLO/Artifact"
+ fi
+fi
+
+if [ $NAMEIDMGMT -eq 1 -a $TYPE == "SHIB" ] ; then
+ SAML2=1
+ NIM[${#NIM[*]}]=$SAML20SOAP
+ NIM[${#NIM[*]}]=$SAML20REDIRECT
+ NIM[${#NIM[*]}]=$SAML20POST
+ NIMLOC[${#NIMLOC[*]}]="Shibboleth.sso/NIM/SOAP"
+ NIMLOC[${#NIMLOC[*]}]="Shibboleth.sso/NIM/Redirect"
+ NIMLOC[${#NIMLOC[*]}]="Shibboleth.sso/NIM/POST"
+ if [ $ARTIFACT -eq 1 -a $TYPE == "SHIB" ] ; then
+ NIM[${#NIM[*]}]=$SAML20ART
+ NIMLOC[${#NIMLOC[*]}]="Shibboleth.sso/NIM/Artifact"
+ fi
+fi
+
+if [ $SAML1 -eq 1 -a $SAML2 -eq 1 ] ; then
+ PROTENUM="$SAML20PROT $SAML11PROT"
+elif [ $SAML1 -eq 1 ] ; then
+ PROTENUM="$SAML11PROT"
+else
+ PROTENUM="$SAML20PROT"
+fi
+
+if [ $SAML2 -eq 1 ] ; then
+ if [ $TYPE == "SHIB" ] ; then
+ ACS[${#ACS[*]}]=$SAML20POST
+ ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML2/POST"
+ ACS[${#ACS[*]}]=$SAML20POSTSS
+ ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML2/POST-SimpleSign"
+ if [ $ARTIFACT -eq 1 ] ; then
+ ACS[${#ACS[*]}]=$SAML20ART
+ ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML2/Artifact"
+ fi
+ ACS[${#ACS[*]}]=$SAML20PAOS
+ ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML2/ECP"
+ elif [ $TYPE == "SSP" ] ; then
+ ACS[${#ACS[*]}]=$SAML20POST
+ ACSLOC[${#ACSLOC[*]}]="simplesaml/module.php/saml/sp/saml2-acs.php/default-sp"
+ if [ $ARTIFACT -eq 1 ] ; then
+ ACS[${#ACS[*]}]=$SAML20ART
+ ACSLOC[${#ACSLOC[*]}]="simplesaml/module.php/saml/sp/saml2-acs.php/default-sp"
+ fi
+ fi
+fi
+
+if [ $SAML1 -eq 1 ] ; then
+ if [ $TYPE == "SHIB" ] ; then
+ ACS[${#ACS[*]}]=$SAML1POST
+ ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML/POST"
+ if [ $ARTIFACT -eq 1 ] ; then
+ ACS[${#ACS[*]}]=$SAML1ART
+ ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML/Artifact"
+ fi
+ elif [ $TYPE == "SSP" ] ; then
+ ACS[${#ACS[*]}]=$SAML1POST
+ ACSLOC[${#ACSLOC[*]}]="simplesaml/module.php/saml/sp/saml1-acs.php/default-sp"
+ if [ $ARTIFACT -eq 1 ] ; then
+ ACS[${#ACS[*]}]=$SAML1ART
+ ACSLOC[${#ACSLOC[*]}]="simplesaml/module.php/saml/sp/saml1-acs.php/default-sp/artifact"
+ fi
+ fi
+fi
+
+if [ $DECLS -eq 1 ] ; then
+ DECLS="xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\" xmlns:ds=\"http://www.w3.org/2000/09/xmldsig#\" "
+ if [ $DS -eq 1 ] ; then
+ DECLS="${DECLS}xmlns:disco=\"urn:oasis:names:tc:SAML:profiles:SSO:idp-discovery-protocol\" "
+ fi
+ if [ $MDUI -eq 1 ] ; then
+ DECLS="${DECLS}xmlns:mdui=\"urn:oasis:names:tc:SAML:metadata:ui\" "
+ fi
+else
+ DECLS=""
+fi
+
+cat <
+
+EOF
+
+# Discovery BEGIN
+if [ $DS -eq 1 -a $TYPE == "SHIB" -o $MDUI -eq 1 ] ; then
+
+cat << EOF
+
+EOF
+
+if [ $MDUI -eq 1 ] ; then
+ cat << EOF
+
+EOF
+
+ if [ -n "$DISPLAYNAME" ] ; then
+ cat << EOF
+ $DISPLAYNAME
+EOF
+ fi
+
+ if [ -n "$DESC" ] ; then
+ cat << EOF
+ $DESC
+EOF
+ fi
+
+ cat << EOF
+
+EOF
+fi
+
+if [ $DS -eq 1 -a $TYPE == "SHIB" ] ; then
+ count=1
+ for h in ${HOSTS[@]}
+ do
+ cat << EOF
+
+EOF
+ let "count++"
+ done
+
+ for h in ${NAKEDHOSTS[@]}
+ do
+ cat << EOF
+
+EOF
+ let "count++"
+ done
+fi
+
+cat << EOF
+
+EOF
+
+fi
+# Discovery END
+
+for c in ${CERTS[@]}
+do
+cat << EOF
+
+
+
+
+EOF
+grep -v ^- $c
+cat << EOF
+
+
+
+
+EOF
+done
+
+# Logout BEGIN
+if [ $LOGOUT -eq 1 ] ; then
+
+for h in ${HOSTS[@]}
+do
+ count=0
+ while [ $count -lt ${#SLO[*]} ]
+ do
+ cat <
+EOF
+ let "count++"
+ done
+done
+
+for h in ${NAKEDHOSTS[@]}
+do
+ count=0
+ while [ $count -lt ${#SLO[*]} ]
+ do
+ cat <
+EOF
+ let "count++"
+ done
+done
+
+fi
+# Logout END
+
+# NameID Mgmt BEGIN
+if [ $NAMEIDMGMT -eq 1 -a $TYPE == "SHIB" ] ; then
+
+for h in ${HOSTS[@]}
+do
+ count=0
+ while [ $count -lt ${#NIM[*]} ]
+ do
+ cat <
+EOF
+ let "count++"
+ done
+done
+
+for h in ${NAKEDHOSTS[@]}
+do
+ count=0
+ while [ $count -lt ${#NIM[*]} ]
+ do
+ cat <
+EOF
+ let "count++"
+ done
+done
+
+fi
+# NameID Mgmt END
+
+for f in ${FORMATS[@]}
+do
+cat << EOF
+ $f
+EOF
+done
+
+index=0
+for h in ${HOSTS[@]}
+do
+ count=0
+ while [ $count -lt ${#ACS[*]} ]
+ do
+ cat <
+EOF
+ let "count++"
+ let "index++"
+ done
+done
+
+for h in ${NAKEDHOSTS[@]}
+do
+ count=0
+ while [ $count -lt ${#ACS[*]} ]
+ do
+ cat <
+EOF
+ let "count++"
+ let "index++"
+ done
+done
+
+cat <
+EOF
+
+if [ -n "$ORGNAME" ] ; then
+ if [ -z "$URL" ] ; then
+ URL=$ENTITYID
+ fi
+ cat <
+ $ORGNAME
+ $ORGNAME
+ $URL
+
+EOF
+fi
+
+count=${#ADMIN[*]}
+for (( i=0; i
+ ${c[0]}
+ ${c[1]}
+ ${c[2]}
+
+EOF
+done
+
+count=${#SUP[*]}
+for (( i=0; i
+ ${c[0]}
+ ${c[1]}
+ ${c[2]}
+
+EOF
+done
+
+count=${#TECH[*]}
+for (( i=0; i
+ ${c[0]}
+ ${c[1]}
+ ${c[2]}
+
+EOF
+done
+
+cat <
+
+EOF
+
diff --git a/shibboleth/native.logger b/shibboleth/native.logger
new file mode 100644
index 0000000..e9a43a5
--- /dev/null
+++ b/shibboleth/native.logger
@@ -0,0 +1,30 @@
+# set overall behavior
+log4j.rootCategory=WARN, native_log
+
+# fairly verbose for DEBUG, so generally leave at WARN/INFO
+log4j.category.XMLTooling.XMLObject=WARN
+log4j.category.XMLTooling.XMLObjectBuilder=WARN
+log4j.category.XMLTooling.KeyInfoResolver=WARN
+log4j.category.Shibboleth.IPRange=WARN
+log4j.category.Shibboleth.PropertySet=WARN
+
+# useful categories to tune independently:
+#
+# interprocess message remoting
+#log4j.category.Shibboleth.Listener=DEBUG
+# mapping of requests to applicationId
+#log4j.category.Shibboleth.RequestMapper=DEBUG
+# high level session cache operations
+#log4j.category.Shibboleth.SessionCache=DEBUG
+
+# define the appender
+
+# Change to SyslogAppender for remote syslog, and set host/port
+log4j.appender.native_log=org.apache.log4j.LocalSyslogAppender
+#log4j.appender.native_log.syslogHost=localhost
+#log4j.appender.native_log.portNumber=514
+log4j.appender.native_log.syslogName=shibboleth
+# Facility is numeric, 16 is LOCAL0
+log4j.appender.native_log.facility=16
+log4j.appender.native_log.layout=org.apache.log4j.PatternLayout
+log4j.appender.native_log.layout.ConversionPattern=%p %c %x: %m%n
diff --git a/shibboleth/partialLogout.html b/shibboleth/partialLogout.html
new file mode 100644
index 0000000..fe24a7c
--- /dev/null
+++ b/shibboleth/partialLogout.html
@@ -0,0 +1,24 @@
+
+
+
+
+
+
+
+ Partial Logout
+
+
+
+
+
+
+
+
Partial Logout
+
+
You remain logged into one or more applications accessed during your session.
+To complete the logout process, please close/exit your browser completely.
To report this problem, please contact the site administrator at
+.
+
+
+
Please include the following message in any email:
+
at ()
+
+
+
+
+
Error from identity provider:
+
+ Status:
+
+ Sub-Status:
+
+
+ Message:
+
+
+
+
+
+
diff --git a/shibboleth/shibboleth2.xml b/shibboleth/shibboleth2.xml
new file mode 100755
index 0000000..691a188
--- /dev/null
+++ b/shibboleth/shibboleth2.xml
@@ -0,0 +1,130 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SAML2 SAML1
+
+
+
+ SAML2 Local
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/shibboleth/shibd-amazon b/shibboleth/shibd-amazon
new file mode 100644
index 0000000..5275390
--- /dev/null
+++ b/shibboleth/shibd-amazon
@@ -0,0 +1,133 @@
+#!/bin/bash
+#
+# shibd Shibboleth Service Provider Daemon
+#
+# chkconfig: - 80 20
+# description: Shibboleth 2 Service Provider Daemon
+# processname: shibd
+# pidfile: /var/run/shibboleth/shibd.pid
+# config: /etc/shibboleth/shibboleth2.xml
+
+### BEGIN INIT INFO
+# Provides: shibd
+# Required-Start: $local_fs $remote_fs $network
+# Should-Start: $time
+# Should-Stop: $time
+# Required-Stop: $local_fs $remote_fs $network
+# Default-Start: 3 5
+# Default-Stop: 0 1 2 6
+# Short-Description: Shibboleth 2 Service Provider Daemon
+# Description: Starts the separate daemon used by the Shibboleth Apache module to manage state and SAML interactions.
+### END INIT INFO
+
+# Source function library.
+. /etc/rc.d/init.d/functions
+
+shibd="/usr/sbin/shibd"
+SHIBD_USER=root
+SHIBD_UMASK=022
+SHIBD_WAIT=30
+prog=shibd
+pidfile=/var/run/shibboleth/shibd.pid
+lockfile=/var/lock/subsys/$prog
+
+[ -e /etc/sysconfig/$prog ] && . /etc/sysconfig/$prog
+
+umask $SHIBD_UMASK
+
+start() {
+ echo -n $"Starting $prog: "
+ if [ -f $lockfile ] ; then
+ if [ -f $pidfile ]; then
+ read kpid < $pidfile
+ if checkpid $kpid 2>&1; then
+ echo "process already running"
+ return 1;
+ else
+ echo "lock file found but no process running for pid $kpid, continuing"
+ fi
+ fi
+ fi
+
+ # Make sure package run directory exists.
+ [ -d /var/run/shibboleth ] || mkdir /var/run/shibboleth
+
+ export SHIBD_PID=$pidfile
+ touch $pidfile
+ chown $SHIBD_USER:$SHIBD_USER $pidfile
+
+ # Handle transition from root to non-root packages.
+ chown -R $SHIBD_USER:$SHIBD_USER /var/run/shibboleth /var/cache/shibboleth 2>/dev/null || :
+ daemon --user $SHIBD_USER $shibd -p $pidfile -f -w $SHIBD_WAIT
+
+ RETVAL=$?
+ echo
+ [ $RETVAL -eq 0 ] && touch $lockfile
+ return $RETVAL
+}
+
+stop() {
+ echo -n $"Stopping $prog: "
+ killproc shibd
+
+ RETVAL=$?
+ echo
+ [ $RETVAL -eq 0 ] && rm -f $lockfile $pidfile
+ return $RETVAL
+}
+
+restart() {
+ stop
+ sleep 5
+ start
+}
+
+reload() {
+ restart
+}
+
+force_reload() {
+ restart
+}
+
+rh_status() {
+ # run checks to determine if the service is running or use generic status
+ status $prog
+}
+
+rh_status_q() {
+ rh_status >/dev/null 2>&1
+}
+
+case "$1" in
+ start)
+ rh_status_q && exit 0
+ $1
+ ;;
+ stop)
+ rh_status_q || exit 0
+ $1
+ ;;
+ restart)
+ $1
+ ;;
+ reload)
+ rh_status_q || exit 7
+ $1
+ ;;
+ force-reload)
+ force_reload
+ ;;
+ status)
+ rh_status
+ ;;
+ condrestart|try-restart)
+ rh_status_q || exit 0
+ restart
+ ;;
+ *)
+ echo $"Usage: $0 {start|stop|status|restart|condrestart|try-restart|reload|force-reload}"
+ exit 2
+esac
+
+exit $?
diff --git a/shibboleth/shibd-debian b/shibboleth/shibd-debian
new file mode 100644
index 0000000..f9860d0
--- /dev/null
+++ b/shibboleth/shibd-debian
@@ -0,0 +1,168 @@
+#! /bin/sh
+### BEGIN INIT INFO
+# Provides: shibd
+# Required-Start: $local_fs $remote_fs $network
+# Required-Stop: $local_fs $remote_fs
+# Default-Start: 2 3 4 5
+# Default-Stop:
+# Short-Description: Shibboleth 3 Service Provider Daemon
+# Description: Starts the separate daemon used by the Shibboleth
+# Apache module to manage sessions and to retrieve
+# attributes from Shibboleth Identity Providers.
+### END INIT INFO
+#
+# Written by Quanah Gibson-Mount
+# Modified by Lukas Haemmerle for Shibboleth 2
+# Updated to use the LSB init functions by Russ Allbery
+#
+# Based on the dh-make template written by:
+#
+# Written by Miquel van Smoorenburg .
+# Modified for Debian
+# by Ian Murdock .
+
+PATH=/sbin:/bin:/usr/sbin:/usr/bin
+DESC="Shibboleth 3 daemon"
+NAME=shibd
+SHIB_HOME=/usr
+SHIBSP_CONFIG=/etc/shibboleth/shibboleth2.xml
+SHIBD_WAIT=30
+LD_LIBRARY_PATH=/usr/lib
+DAEMON=/usr/sbin/$NAME
+SCRIPTNAME=/etc/init.d/$NAME
+PIDFILE=/var/run/shibboleth/$NAME.pid
+DAEMON_OPTS=""
+DAEMON_USER=_shibd
+
+# Read configuration if it is present.
+[ -r /etc/default/$NAME ] && . /etc/default/$NAME
+
+# Force removal of socket
+DAEMON_OPTS="$DAEMON_OPTS -f"
+
+# Use defined configuration file
+DAEMON_OPTS="$DAEMON_OPTS -c $SHIBSP_CONFIG"
+
+# Specify pid file to use
+DAEMON_OPTS="$DAEMON_OPTS -p $PIDFILE"
+
+# Specify wait time to use
+DAEMON_OPTS="$DAEMON_OPTS -w $SHIBD_WAIT"
+
+# Exit if the package is not installed.
+[ -x "$DAEMON" ] || exit 0
+
+# Load the VERBOSE setting and other rcS variables
+. /lib/init/vars.sh
+
+# Define LSB log_* functions.
+. /lib/lsb/init-functions
+
+prepare_environment () {
+ # Ensure /var/run/shibboleth exists. /var/run may be on a tmpfs file system.
+ [ -d '/var/run/shibboleth' ] || mkdir -p '/var/run/shibboleth'
+
+ # If $DAEMON_USER is set, try to run shibd as that user. However,
+ # versions of the Debian package prior to 2.3+dfsg-1 ran shibd as root,
+ # and the local administrator may not have made the server's private key
+ # readable by $DAEMON_USER. We therefore test first by running shibd -t
+ # and looking for the error code indicating that the private key could not
+ # be read. If we get that error, we fall back on running shibd as root.
+ if [ -n "$DAEMON_USER" ]; then
+ DIAG=$(su -s $DAEMON $DAEMON_USER -- -t $DAEMON_OPTS 2>/dev/null)
+ if [ $? = 0 ] ; then
+ # openssl errstr 200100D (hex for 33558541) says:
+ # error:0200100D:system library:fopen:Permission denied
+ ERROR='ERROR OpenSSL : error code: 33558541 '
+ if echo "$DIAG" | fgrep -q "$ERROR" ; then
+ unset DAEMON_USER
+ log_warning_msg "$NAME: file permissions require running as" \
+ "root"
+ else
+ chown -Rh "$DAEMON_USER" '/var/run/shibboleth' '/var/log/shibboleth'
+ fi
+ else
+ unset DAEMON_USER
+ log_warning_msg "$NAME: unable to run config check as user" \
+ "$DAEMON_USER"
+ fi
+ unset DIAG
+ fi
+}
+
+# Start shibd.
+do_start () {
+ # Return
+ # 0 if daemon has been started
+ # 1 if daemon was already running
+ # 2 if daemon could not be started
+ start-stop-daemon --start --quiet ${DAEMON_USER:+--chuid $DAEMON_USER} \
+ --pidfile $PIDFILE --exec $DAEMON --test > /dev/null \
+ || return 1
+ start-stop-daemon --start --quiet ${DAEMON_USER:+--chuid $DAEMON_USER} \
+ --pidfile $PIDFILE --exec $DAEMON -- $DAEMON_OPTS \
+ || return 2
+}
+
+# Stop shibd.
+do_stop () {
+ # Return
+ # 0 if daemon has been stopped
+ # 1 if daemon was already stopped
+ # 2 if daemon could not be stopped
+ # other if a failure occurred
+ start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 \
+ --pidfile $PIDFILE --name $NAME
+ RETVAL="$?"
+ return "$RETVAL"
+}
+
+case "$1" in
+start)
+ prepare_environment
+
+ [ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME"
+ do_start
+ case "$?" in
+ 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
+ 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;;
+ esac
+ ;;
+stop)
+ [ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME"
+ do_stop
+ case "$?" in
+ 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
+ 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;;
+ esac
+ ;;
+restart|force-reload)
+ prepare_environment
+
+ log_daemon_msg "Restarting $DESC" "$NAME"
+ do_stop
+ case "$?" in
+ 0|1)
+ do_start
+ case "$?" in
+ 0) log_end_msg 0 ;;
+ 1) log_end_msg 1 ;; # Old process is still running
+ *) log_end_msg 1 ;; # Failed to start
+ esac
+ ;;
+ *)
+ # Failed to stop
+ log_end_msg 1
+ ;;
+ esac
+ ;;
+status)
+ status_of_proc -p "$PIDFILE" "$DAEMON" "$NAME" && exit 0 || exit $?
+ ;;
+*)
+ echo "Usage: $SCRIPTNAME {start|stop|restart|force-reload|status}" >&2
+ exit 1
+ ;;
+esac
+
+exit 0
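The Debian init script above sources /etc/default/shibd before applying its built-in defaults, so site-specific tuning would normally go in that file rather than in the script. A minimal sketch of such an override file, using only variables the script already defines (the values are illustrative, not recommendations):

    # /etc/default/shibd -- example overrides read by the init script above
    SHIBD_WAIT=60                                   # allow a slow shibd up to 60s to start
    DAEMON_USER=_shibd                              # run as the unprivileged package user
    SHIBSP_CONFIG=/etc/shibboleth/shibboleth2.xml   # SP configuration checked with shibd -t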
diff --git a/shibboleth/shibd-osx.plist b/shibboleth/shibd-osx.plist
new file mode 100644
index 0000000..795c312
--- /dev/null
+++ b/shibboleth/shibd-osx.plist
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+    <key>Label</key><string>net.shibboleth.sp.shibd</string>
+    <key>ServiceDescription</key><string>Shibboleth 3 Service Provider daemon</string>
+
+    <key>ProgramArguments</key>
+    <array>
+        <string>/usr/sbin/shibd</string>
+        <string>-F</string>
+        <string>-f</string>
+        <string>-p</string>
+        <string>/var/run/shibboleth/shibd.pid</string>
+    </array>
+
+    <key>RunAtLoad</key><true/>
+    <key>OnDemand</key><false/>
+    <key>StandardErrorPath</key><string>/dev/null</string>
+    <key>UserName</key><string>root</string>
+    <key>Umask</key><string>0022</string>
+</dict>
+</plist>
diff --git a/shibboleth/shibd-redhat b/shibboleth/shibd-redhat
new file mode 100755
index 0000000..f53a954
--- /dev/null
+++ b/shibboleth/shibd-redhat
@@ -0,0 +1,133 @@
+#!/bin/bash
+#
+# shibd Shibboleth Service Provider Daemon
+#
+# chkconfig: - 80 20
+# description: Shibboleth 3 Service Provider Daemon
+# processname: shibd
+# pidfile: /var/run/shibboleth/shibd.pid
+# config: /etc/shibboleth/shibboleth2.xml
+
+### BEGIN INIT INFO
+# Provides: shibd
+# Required-Start: $local_fs $remote_fs $network
+# Should-Start: $time
+# Should-Stop: $time
+# Required-Stop: $local_fs $remote_fs $network
+# Default-Start: 3 5
+# Default-Stop: 0 1 2 6
+# Short-Description: Shibboleth 3 Service Provider Daemon
+# Description: Starts the separate daemon used by the Shibboleth Apache module to manage state and SAML interactions.
+### END INIT INFO
+
+# Source function library.
+. /etc/rc.d/init.d/functions
+
+shibd="/usr/sbin/shibd"
+SHIBD_USER=shibd
+SHIBD_UMASK=022
+SHIBD_WAIT=30
+prog=shibd
+pidfile=/var/run/shibboleth/shibd.pid
+lockfile=/var/lock/subsys/$prog
+
+[ -e /etc/sysconfig/$prog ] && . /etc/sysconfig/$prog
+
+umask $SHIBD_UMASK
+
+start() {
+ echo -n $"Starting $prog: "
+ if [ -f $lockfile ] ; then
+ if [ -f $pidfile ]; then
+ read kpid < $pidfile
+ if checkpid $kpid 2>&1; then
+ echo "process already running"
+ return 1;
+ else
+ echo "lock file found but no process running for pid $kpid, continuing"
+ fi
+ fi
+ fi
+
+ # Make sure package run directory exists.
+ [ -d /var/run/shibboleth ] || mkdir /var/run/shibboleth
+
+ export SHIBD_PID=$pidfile
+ touch $pidfile
+ chown $SHIBD_USER:$SHIBD_USER $pidfile
+
+ # Handle transition from root to non-root packages.
+ chown -R $SHIBD_USER:$SHIBD_USER /var/run/shibboleth /var/cache/shibboleth 2>/dev/null || :
+ daemon --user $SHIBD_USER $shibd -p $pidfile -f -w $SHIBD_WAIT
+
+ RETVAL=$?
+ echo
+ [ $RETVAL -eq 0 ] && touch $lockfile
+ return $RETVAL
+}
+
+stop() {
+ echo -n $"Stopping $prog: "
+ killproc shibd
+
+ RETVAL=$?
+ echo
+ [ $RETVAL -eq 0 ] && rm -f $lockfile $pidfile
+ return $RETVAL
+}
+
+restart() {
+ stop
+ sleep 5
+ start
+}
+
+reload() {
+ restart
+}
+
+force_reload() {
+ restart
+}
+
+rh_status() {
+ # run checks to determine if the service is running or use generic status
+ status $prog
+}
+
+rh_status_q() {
+ rh_status >/dev/null 2>&1
+}
+
+case "$1" in
+ start)
+ rh_status_q && exit 0
+ $1
+ ;;
+ stop)
+ rh_status_q || exit 0
+ $1
+ ;;
+ restart)
+ $1
+ ;;
+ reload)
+ rh_status_q || exit 7
+ $1
+ ;;
+ force-reload)
+ force_reload
+ ;;
+ status)
+ rh_status
+ ;;
+ condrestart|try-restart)
+ rh_status_q || exit 0
+ restart
+ ;;
+ *)
+ echo $"Usage: $0 {start|stop|status|restart|condrestart|try-restart|reload|force-reload}"
+ exit 2
+esac
+
+exit $?
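The Red Hat variant above likewise reads /etc/sysconfig/shibd for overrides and registers with SysV runlevels through its chkconfig header. A brief sketch of how it would typically be wired up (illustrative values and standard SysV tooling, not taken from this repo):

    # /etc/sysconfig/shibd -- example overrides (illustrative)
    SHIBD_USER=shibd
    SHIBD_WAIT=60

    # register and start the service with the SysV tools the header advertises
    chkconfig shibd on
    service shibd start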
diff --git a/shibboleth/shibd-suse b/shibboleth/shibd-suse
new file mode 100644
index 0000000..d90dfab
--- /dev/null
+++ b/shibboleth/shibd-suse
@@ -0,0 +1,130 @@
+#! /bin/sh
+# Author: Peter Schober and many others
+# based on shibd-debian (from Shibboleth's 1.3.1 SP source distribution)
+# and SUSE's /etc/init.d/cyrus
+#
+# /etc/init.d/shibd
+#
+### BEGIN INIT INFO
+# Provides: shibd
+# Required-Start: $local_fs $remote_fs $network
+# Should-Start: $time
+# Should-Stop: $time
+# Required-Stop: $local_fs $remote_fs $network
+# Default-Start: 3 5
+# Default-Stop: 0 1 2 6
+# Short-Description: Shibboleth 3 Service Provider Daemon
+# Description: Starts the separate daemon used by the Shibboleth Apache module to manage state and SAML interactions.
+### END INIT INFO
+#
+
+DESC="Shibboleth 3 daemon"
+NAME=shibd
+SHIB_CONFIG=/etc/shibboleth/shibboleth2.xml
+DAEMON=/usr/sbin/$NAME
+SCRIPTNAME=/etc/init.d/$NAME
+PID_FILE=/var/run/shibboleth/shibd.pid
+SHIBD_USER=shibd
+SHIBD_UMASK=022
+SHIBD_WAIT=30
+DAEMON_OPTS=""
+
+[ -e /etc/sysconfig/$NAME ] && . /etc/sysconfig/$NAME
+
+# Force removal of socket
+DAEMON_OPTS="$DAEMON_OPTS -f"
+
+# Use defined configuration file
+DAEMON_OPTS="$DAEMON_OPTS -c $SHIB_CONFIG"
+
+# Specify pid file to use
+DAEMON_OPTS="$DAEMON_OPTS -p $PID_FILE"
+
+# Specify wait time to use
+DAEMON_OPTS="$DAEMON_OPTS -w $SHIBD_WAIT"
+
+umask $SHIBD_UMASK
+
+# Exit if the package is not installed.
+test -x "$DAEMON" || exit 5
+
+. /etc/rc.status
+
+# First reset status of this service
+rc_reset
+
+case "$1" in
+ start)
+ # Make sure package run directory exists.
+ [ -d /var/run/shibboleth ] || mkdir /var/run/shibboleth
+
+ # Handle transition from root to non-root packages.
+ chown -R $SHIBD_USER:$SHIBD_USER /var/run/shibboleth /var/cache/shibboleth 2>/dev/null || :
+
+ echo -n "Starting $DESC ($NAME)"
+ ## Start daemon with startproc(8). If this fails
+ ## the echo return value is set appropriate.
+
+ # NOTE: startproc return 0, even if service is
+ # already running to match LSB spec.
+ /sbin/startproc -u $SHIBD_USER -p $PID_FILE $DAEMON $DAEMON_OPTS > /dev/null 2>&1
+
+ # Remember status and be verbose
+ rc_status -v
+ ;;
+ stop)
+ echo -n "Shutting down $DESC ($NAME)"
+ ## Stop daemon with killproc(8) and if this fails
+ ## set echo the echo return value.
+
+ /sbin/killproc -p $PID_FILE -TERM $DAEMON > /dev/null 2>&1
+
+ # Remember status and be verbose
+ rc_status -v
+ ;;
+ try-restart)
+ ## Stop the service and if this succeeds (i.e. the
+ ## service was running before), start it again.
+ ## Note: try-restart is not (yet) part of LSB (as of 0.7.5)
+ $0 status >/dev/null && $0 restart
+
+ # Remember status and be quiet
+ rc_status
+ ;;
+ restart)
+ ## Stop the service and regardless of whether it was
+ ## running or not, start it again.
+ $0 stop
+ $0 start
+
+ # Remember status and be quiet
+ rc_status
+ ;;
+ configtest)
+ ## Check config files
+
+ echo -n "Checking config for $DESC ($NAME): "
+ $DAEMON $DAEMON_OPTS -t
+ rc_status -v
+ ;;
+ status)
+ echo -n "Checking for service $DESC ($NAME)"
+ ## Check status with checkproc(8), if process is running
+ ## checkproc will return with exit status 0.
+
+        # Status has a slightly different meaning for the status command:
+ # 0 - service running
+ # 1 - service dead, but /var/run/ pid file exists
+ # 2 - service dead, but /var/lock/ lock file exists
+ # 3 - service not running
+
+ # NOTE: checkproc returns LSB compliant status values.
+ /sbin/checkproc -p $PID_FILE $DAEMON
+ rc_status -v
+ ;;
+ *)
+ echo "Usage: $0 {start|stop|status|configtest|try-restart|restart}"
+ exit 1
+ ;;
+esac
+rc_exit
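The SUSE script adds a configtest action that runs shibd -t against the configured shibboleth2.xml, which makes a guarded restart straightforward; for example:

    # only restart if the configuration parses cleanly
    /etc/init.d/shibd configtest && /etc/init.d/shibd restart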
diff --git a/shibboleth/shibd-systemd b/shibboleth/shibd-systemd
new file mode 100644
index 0000000..c02f0d6
--- /dev/null
+++ b/shibboleth/shibd-systemd
@@ -0,0 +1,23 @@
+[Unit]
+Description=Shibboleth Service Provider Daemon
+Documentation=man:shibd(8)
+Documentation=https://wiki.shibboleth.net/confluence/display/SP3/Home
+After=network.target
+Before=httpd.service
+
+[Service]
+Type=notify
+NotifyAccess=main
+User=shibd
+#Environment=LD_LIBRARY_PATH=/opt/shibboleth/lib
+ExecStart=/usr/sbin/shibd -f -F
+StandardInput=null
+StandardOutput=null
+StandardError=journal
+TimeoutStopSec=1min
+TimeoutStartSec=5min
+Restart=on-failure
+RestartSec=30s
+
+[Install]
+WantedBy=multi-user.target
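The unit file does not say where it gets installed; assuming it is copied to /etc/systemd/system/shibd.service, activation would follow the usual systemd pattern:

    # reload unit definitions, then start shibd now and on every boot
    systemctl daemon-reload
    systemctl enable --now shibd.service
    systemctl status shibd.service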
diff --git a/shibboleth/shibd.logger b/shibboleth/shibd.logger
new file mode 100644
index 0000000..39950c5
--- /dev/null
+++ b/shibboleth/shibd.logger
@@ -0,0 +1,73 @@
+# set overall behavior
+log4j.rootCategory=INFO, shibd_log, warn_log
+
+# fairly verbose for DEBUG, so generally leave at INFO
+log4j.category.XMLTooling.XMLObject=INFO
+log4j.category.XMLTooling.XMLObjectBuilder=INFO
+log4j.category.XMLTooling.KeyInfoResolver=INFO
+log4j.category.Shibboleth.IPRange=INFO
+log4j.category.Shibboleth.PropertySet=INFO
+
+# raise for low-level tracing of SOAP client HTTP/SSL behavior
+log4j.category.XMLTooling.libcurl=INFO
+
+# useful categories to tune independently:
+#
+# tracing of SAML messages and security policies
+#log4j.category.OpenSAML.MessageDecoder=DEBUG
+#log4j.category.OpenSAML.MessageEncoder=DEBUG
+#log4j.category.OpenSAML.SecurityPolicyRule=DEBUG
+#log4j.category.XMLTooling.SOAPClient=DEBUG
+# interprocess message remoting
+#log4j.category.Shibboleth.Listener=DEBUG
+# mapping of requests to applicationId
+#log4j.category.Shibboleth.RequestMapper=DEBUG
+# high level session cache operations
+#log4j.category.Shibboleth.SessionCache=DEBUG
+# persistent storage and caching
+#log4j.category.XMLTooling.StorageService=DEBUG
+
+# logs XML being signed or verified if set to DEBUG
+log4j.category.XMLTooling.Signature.Debugger=INFO, sig_log
+log4j.additivity.XMLTooling.Signature.Debugger=false
+log4j.ownAppenders.XMLTooling.Signature.Debugger=true
+
+# the tran log blocks the "default" appender(s) at runtime
+# Level should be left at INFO for this category
+log4j.category.Shibboleth-TRANSACTION=INFO, tran_log
+log4j.additivity.Shibboleth-TRANSACTION=false
+log4j.ownAppenders.Shibboleth-TRANSACTION=true
+
+# uncomment to suppress particular event types
+#log4j.category.Shibboleth-TRANSACTION.AuthnRequest=WARN
+#log4j.category.Shibboleth-TRANSACTION.Login=WARN
+#log4j.category.Shibboleth-TRANSACTION.Logout=WARN
+
+# define the appenders
+
+log4j.appender.shibd_log=org.apache.log4j.RollingFileAppender
+log4j.appender.shibd_log.fileName=/var/log/shibboleth/shibd.log
+log4j.appender.shibd_log.maxFileSize=1000000
+log4j.appender.shibd_log.maxBackupIndex=10
+log4j.appender.shibd_log.layout=org.apache.log4j.PatternLayout
+log4j.appender.shibd_log.layout.ConversionPattern=%d{%Y-%m-%d %H:%M:%S} %p %c %x: %m%n
+
+log4j.appender.warn_log=org.apache.log4j.RollingFileAppender
+log4j.appender.warn_log.fileName=/var/log/shibboleth/shibd_warn.log
+log4j.appender.warn_log.maxFileSize=1000000
+log4j.appender.warn_log.maxBackupIndex=10
+log4j.appender.warn_log.layout=org.apache.log4j.PatternLayout
+log4j.appender.warn_log.layout.ConversionPattern=%d{%Y-%m-%d %H:%M:%S} %p %c %x: %m%n
+log4j.appender.warn_log.threshold=WARN
+
+log4j.appender.tran_log=org.apache.log4j.RollingFileAppender
+log4j.appender.tran_log.fileName=/var/log/shibboleth/transaction.log
+log4j.appender.tran_log.maxFileSize=1000000
+log4j.appender.tran_log.maxBackupIndex=20
+log4j.appender.tran_log.layout=org.apache.log4j.PatternLayout
+log4j.appender.tran_log.layout.ConversionPattern=%d{%Y-%m-%d %H:%M:%S}|%c|%m%n
+
+log4j.appender.sig_log=org.apache.log4j.FileAppender
+log4j.appender.sig_log.fileName=/var/log/shibboleth/signature.log
+log4j.appender.sig_log.layout=org.apache.log4j.PatternLayout
+log4j.appender.sig_log.layout.ConversionPattern=%m
diff --git a/shibboleth/sslError.html b/shibboleth/sslError.html
new file mode 100644
index 0000000..367366a
--- /dev/null
+++ b/shibboleth/sslError.html
@@ -0,0 +1,33 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8" />
+    <title>POST Failed</title>
+  </head>
+  <body>
+    <h1>POST Failed</h1>
+    <p>
+    You have attempted to submit information without the protection
+    of TLS to this site.
+    </p>
+    <p>
+    For the protection of your submission and the integrity of the site,
+    this is not permitted. Please try accessing the server with a
+    URL starting with <code>https://</code> and report this problem
+    to the site administrator.
+    </p>
+  </body>
+</html>
From 3a4219e64b21c77b44babfae23773bcd783035b9 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 14 Feb 2023 13:25:50 +0100
Subject: [PATCH 195/354] Update installation.rst
---
doc/installation.rst | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 15a2c97..35a6381 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -21,9 +21,9 @@ storage
Prerequisites: SSH to the working VM as the administrator and make sure that you have sudo rights to install the following:
-- [Docker and Docker-compose](https://https://www.docker.com/)
-* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
-+ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli)
+- [Docker and Docker-compose](https://https://www.docker.com/) - Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
+* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development.
++ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) - The Azure Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure and execute administrative commands on Azure resources. It allows the execution of commands through a terminal using interactive command-line prompts or a script.
Installation of docker, docker-compose, git, and azure-cli
----------------------------------------------------------
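The body of that section is not part of this hunk; on the Ubuntu VM the installs would typically look something like the following sketch (the Azure CLI one-liner is Microsoft's documented Debian/Ubuntu installer; the package names are assumptions about this setup, not the repo's exact instructions):

    sudo apt-get update
    sudo apt-get install -y docker.io docker-compose git
    curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash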
From 8fd8691ce60720385ac677451a8a2dac0b41c923 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 13:13:43 +0100
Subject: [PATCH 196/354] Create README.rst
---
doc/README.rst | 1 +
1 file changed, 1 insertion(+)
create mode 100644 doc/README.rst
diff --git a/doc/README.rst b/doc/README.rst
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/doc/README.rst
@@ -0,0 +1 @@
+
From fd0bac9a01161271ae7d7e044645844f5ba6a889 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 13:42:49 +0100
Subject: [PATCH 197/354] Update README.rst
---
doc/README.rst | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/doc/README.rst b/doc/README.rst
index 8b13789..8136110 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -1 +1,9 @@
+The following documentation will guide you through building a custom docker container with a Dataverse project. DataverseNO uses Docker containers to manage deployment and updates.
+1 Motivation for Dataverse Docker Container
+Let assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But once you have deployed the application into a server, boom! Your application does not work anymore.
+Many factors can contribute that makes this happen. It could be the operating system compatibility or different library versions. Therefore, your application could be deployed, and you will get a lot of challenges. Docker will come to your help and will help remove these challenges because of the incompatibility problem
+This documentation will show you how to use Docker to containerize your Dataverse application so you can run them on any servers regardless of their operating system inside of them. The test is done on Ubuntu OS.
+2 What is Docker
+Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
+They are several steps needed to implementation docker: 1) Install Docker on your machine, 2) create a file called Dockerfile, 3) Build the image and development of Dockerfile.
From cf81abd85f7c20217ed5f36aab9870f830c297ce Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 13:47:00 +0100
Subject: [PATCH 198/354] Update README.rst
---
doc/README.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index 8136110..e3ca617 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -1,9 +1,9 @@
The following documentation will guide you through building a custom docker container with a Dataverse project. DataverseNO uses Docker containers to manage deployment and updates.
-1 Motivation for Dataverse Docker Container
+1. Motivation for Dataverse Docker Container
Let assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But once you have deployed the application into a server, boom! Your application does not work anymore.
Many factors can contribute that makes this happen. It could be the operating system compatibility or different library versions. Therefore, your application could be deployed, and you will get a lot of challenges. Docker will come to your help and will help remove these challenges because of the incompatibility problem
This documentation will show you how to use Docker to containerize your Dataverse application so you can run them on any servers regardless of their operating system inside of them. The test is done on Ubuntu OS.
-2 What is Docker
+2. What is Docker
Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
They are several steps needed to implementation docker: 1) Install Docker on your machine, 2) create a file called Dockerfile, 3) Build the image and development of Dockerfile.
From 81db5699d6e492bbd10ef5ef3aa0bc26efecf7ce Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 13:53:47 +0100
Subject: [PATCH 199/354] Update README.rst
---
doc/README.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index e3ca617..825367d 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -1,9 +1,9 @@
The following documentation will guide you through building a custom docker container with a Dataverse project. DataverseNO uses Docker containers to manage deployment and updates.
-1. Motivation for Dataverse Docker Container
+#. Motivation for Dataverse Docker Container
Let assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But once you have deployed the application into a server, boom! Your application does not work anymore.
Many factors can contribute that makes this happen. It could be the operating system compatibility or different library versions. Therefore, your application could be deployed, and you will get a lot of challenges. Docker will come to your help and will help remove these challenges because of the incompatibility problem
This documentation will show you how to use Docker to containerize your Dataverse application so you can run them on any servers regardless of their operating system inside of them. The test is done on Ubuntu OS.
-2. What is Docker
+#. What is Docker
Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
They are several steps needed to implementation docker: 1) Install Docker on your machine, 2) create a file called Dockerfile, 3) Build the image and development of Dockerfile.
From de1bc671cab77f3554e8a30023ba7ac9709b4743 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 13:54:16 +0100
Subject: [PATCH 200/354] Update README.rst
---
doc/README.rst | 2 ++
1 file changed, 2 insertions(+)
diff --git a/doc/README.rst b/doc/README.rst
index 825367d..d143e87 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -1,8 +1,10 @@
The following documentation will guide you through building a custom docker container with a Dataverse project. DataverseNO uses Docker containers to manage deployment and updates.
+
#. Motivation for Dataverse Docker Container
Let assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But once you have deployed the application into a server, boom! Your application does not work anymore.
Many factors can contribute that makes this happen. It could be the operating system compatibility or different library versions. Therefore, your application could be deployed, and you will get a lot of challenges. Docker will come to your help and will help remove these challenges because of the incompatibility problem
This documentation will show you how to use Docker to containerize your Dataverse application so you can run them on any servers regardless of their operating system inside of them. The test is done on Ubuntu OS.
+
#. What is Docker
Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
From 1087b081e9e978aacbbbe3c3d5069bdf62a37e0b Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 13:55:50 +0100
Subject: [PATCH 201/354] Update README.rst
---
doc/README.rst | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index d143e87..e3b770f 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -1,11 +1,14 @@
The following documentation will guide you through building a custom docker container with a Dataverse project. DataverseNO uses Docker containers to manage deployment and updates.
-#. Motivation for Dataverse Docker Container
+1. Motivation for Dataverse Docker Container
+--------------------------------------------
+
Let assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But once you have deployed the application into a server, boom! Your application does not work anymore.
Many factors can contribute that makes this happen. It could be the operating system compatibility or different library versions. Therefore, your application could be deployed, and you will get a lot of challenges. Docker will come to your help and will help remove these challenges because of the incompatibility problem
This documentation will show you how to use Docker to containerize your Dataverse application so you can run them on any servers regardless of their operating system inside of them. The test is done on Ubuntu OS.
-#. What is Docker
+2. What is Docker
+-----------------
Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
They are several steps needed to implementation docker: 1) Install Docker on your machine, 2) create a file called Dockerfile, 3) Build the image and development of Dockerfile.
From 17ac47221c3db7c736646987e03d7d368e994df3 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 14:10:12 +0100
Subject: [PATCH 202/354] Update README.rst
---
doc/README.rst | 16 +++++++++++-----
1 file changed, 11 insertions(+), 5 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index e3b770f..6c13140 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -1,14 +1,20 @@
-The following documentation will guide you through building a custom docker container with a Dataverse project. DataverseNO uses Docker containers to manage deployment and updates.
+The following documentation will guide you through building a custom docker container with a Dataverse project on Microsoft Azure Cloud. DataverseNO uses Docker containers to manage Dataverse deployment and updates.
1. Motivation for Dataverse Docker Container
--------------------------------------------
-Let assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But once you have deployed the application into a server, boom! Your application does not work anymore.
-Many factors can contribute that makes this happen. It could be the operating system compatibility or different library versions. Therefore, your application could be deployed, and you will get a lot of challenges. Docker will come to your help and will help remove these challenges because of the incompatibility problem
-This documentation will show you how to use Docker to containerize your Dataverse application so you can run them on any servers regardless of their operating system inside of them. The test is done on Ubuntu OS.
+Let assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But when you have deployed the same application into a another server, boom! Your application does not work anymore.
+Many factors can contribute that makes this happen. It could be the operating system compatibility or different library versions. Therefore, your application could be deployed, and you will get a lot of challenges.
+Docker will come to your help and will help remove these challenges because of the incompatibility problem.
+
+This documentation will show you how to use Docker to containerize your Dataverse application so you can run them on any servers regardless of their operating system inside of them. The test is done on Ubuntu OS on Microsoft Azure Cloud.
2. What is Docker
-----------------
Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
-They are several steps needed to implementation docker: 1) Install Docker on your machine, 2) create a file called Dockerfile, 3) Build the image and development of Dockerfile.
+They are several steps needed to implementation Dataverse Docker Container:
+1) Install Docker on your machine,
+2) create a file called Dockerfile,
+3) Build the image and development of Dockerfile.
+
From da15c6fc3d0809d90436640556850fb6febfa201 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 14:26:06 +0100
Subject: [PATCH 203/354] Update README.rst
---
doc/README.rst | 18 ++++++++----------
1 file changed, 8 insertions(+), 10 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index 6c13140..9a5b090 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -3,18 +3,16 @@ The following documentation will guide you through building a custom docker cont
1. Motivation for Dataverse Docker Container
--------------------------------------------
-Let assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But when you have deployed the same application into a another server, boom! Your application does not work anymore.
-Many factors can contribute that makes this happen. It could be the operating system compatibility or different library versions. Therefore, your application could be deployed, and you will get a lot of challenges.
-Docker will come to your help and will help remove these challenges because of the incompatibility problem.
+Let's assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But when you deploy the same application to another server for production or demo, boom! Your application does not work anymore. Many factors can contribute to this: it could be operating system compatibility or different library versions. As a result, your application cannot be deployed, and you run into a lot of challenges.
+Docker comes to your help and removes these challenges caused by incompatibility problems.
-This documentation will show you how to use Docker to containerize your Dataverse application so you can run them on any servers regardless of their operating system inside of them. The test is done on Ubuntu OS on Microsoft Azure Cloud.
+This documentation will show you how to use Docker to containerize your Dataverse application so you can run it on any server regardless of the operating system. DataverseNO testing is done on Ubuntu on the Microsoft Azure Cloud.
-2. What is Docker
+2. What is Docker?
-----------------
Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
-They are several steps needed to implementation Dataverse Docker Container:
-1) Install Docker on your machine,
-2) create a file called Dockerfile,
-3) Build the image and development of Dockerfile.
-
+They are several steps needed to implementation DataverseNO Docker Container:
+1) Installation of Docker on our VM machine,
+2) Creation of Dockerfile, and
+3) Building images and development of Dockerfile.
From ccd282d2f324c2f6c5be6271c07e3c19884ffca9 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 14:27:04 +0100
Subject: [PATCH 204/354] Update README.rst
---
doc/README.rst | 3 +++
1 file changed, 3 insertions(+)
diff --git a/doc/README.rst b/doc/README.rst
index 9a5b090..5558c6d 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -13,6 +13,9 @@ This documentation will show you how to use Docker to containerize your Datavers
Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
They are several steps needed to implementation DataverseNO Docker Container:
+
1) Installation of Docker on our VM machine,
+
2) Creation of Dockerfile, and
+
3) Building images and development of Dockerfile.
From b56b9d5aeb9a3edee1399ee3188df475f95a0f60 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 14:31:19 +0100
Subject: [PATCH 205/354] Update README.rst
---
doc/README.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/README.rst b/doc/README.rst
index 5558c6d..0d01996 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -1,6 +1,6 @@
The following documentation will guide you through building a custom docker container with a Dataverse project on Microsoft Azure Cloud. DataverseNO uses Docker containers to manage Dataverse deployment and updates.
-1. Motivation for Dataverse Docker Container
+1. Motivation for DataverseNO Docker Container
--------------------------------------------
Let's assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But when you deploy the same application to another server for production or demo, boom! Your application does not work anymore. Many factors can contribute to this: it could be operating system compatibility or different library versions. As a result, your application cannot be deployed, and you run into a lot of challenges.
From 703f294ee5d247f7d2bb75fa1dd23e789a41d484 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 15 Feb 2023 13:37:45 +0000
Subject: [PATCH 206/354] turn off header on variable + minor update
---
distros/dataverse.no/docker-compose.yaml | 1 +
distros/dataverse.no/init.d/0000-preboot.sh | 0
.../dataverse.no/init.d/01-persistent-id.sh | 1 +
.../init.d/012-disable-imageMagick.sh | 0
.../dataverse.no/init.d/023-afilliation.sh | 0
.../dataverse.no/init.d/055-dvwebloader.sh | 0
.../init.d/101-header-footer-custumisation.sh | 17 ++++++++++++---
distros/dataverse.no/init.d/202-trigger.sh | 7 -------
.../init.d/203-counterprocessor.sh | 21 -------------------
.../affiliations/builtinuser_trigger.sql.2 | 1 +
.../init.d/affiliations/updatetrigger.sql | 1 +
11 files changed, 18 insertions(+), 31 deletions(-)
mode change 100644 => 100755 distros/dataverse.no/init.d/0000-preboot.sh
mode change 100644 => 100755 distros/dataverse.no/init.d/012-disable-imageMagick.sh
mode change 100644 => 100755 distros/dataverse.no/init.d/023-afilliation.sh
mode change 100644 => 100755 distros/dataverse.no/init.d/055-dvwebloader.sh
delete mode 100755 distros/dataverse.no/init.d/202-trigger.sh
delete mode 100644 distros/dataverse.no/init.d/203-counterprocessor.sh
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index c15bf80..61f2cea 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -201,6 +201,7 @@ services:
- "POSTGRES_USER"
- "POSTGRES_PASSWORD"
- "PGPASSWORD"
+ - "TESTBANNER"
- "TWORAVENS_LOCATION=NOT INSTALLED"
- "RSERVE_HOST=localhost"
- "RSERVE_PORT=6311"
diff --git a/distros/dataverse.no/init.d/0000-preboot.sh b/distros/dataverse.no/init.d/0000-preboot.sh
old mode 100644
new mode 100755
diff --git a/distros/dataverse.no/init.d/01-persistent-id.sh b/distros/dataverse.no/init.d/01-persistent-id.sh
index d1518b1..6484e33 100755
--- a/distros/dataverse.no/init.d/01-persistent-id.sh
+++ b/distros/dataverse.no/init.d/01-persistent-id.sh
@@ -5,6 +5,7 @@ SERVER=http://${DATAVERSE_URL}/api
echo $SERVER
curl -X PUT -d https://site.uit.no/dataverseno/support/ "$SERVER/admin/settings/:NavbarSupportUrl"
curl -X PUT -d http://site.uit.no/dataverseno/deposit/ "$SERVER/admin/settings/:NavbarGuidesUrl"
+curl -X PUT -d https://site.uit.no/dataverseno/deposit/deposit-your-data/#log-in "$SERVER/admin/settings/:GuidesBaseUrl"
curl -X PUT -d 'false' "$SERVER/admin/settings/:AllowSignUp"
curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:SignUpUrl"
curl -X PUT -d CV "$SERVER/admin/settings/:CV"
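A quick way to confirm that settings such as the new :GuidesBaseUrl landed is to read the settings back from the same admin endpoint the script already uses (this assumes the admin API is reachable at $SERVER, as above):

    # list all database settings and pick out the guides entries
    curl -s "$SERVER/admin/settings" | grep Guides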
diff --git a/distros/dataverse.no/init.d/012-disable-imageMagick.sh b/distros/dataverse.no/init.d/012-disable-imageMagick.sh
old mode 100644
new mode 100755
diff --git a/distros/dataverse.no/init.d/023-afilliation.sh b/distros/dataverse.no/init.d/023-afilliation.sh
old mode 100644
new mode 100755
diff --git a/distros/dataverse.no/init.d/055-dvwebloader.sh b/distros/dataverse.no/init.d/055-dvwebloader.sh
old mode 100644
new mode 100755
diff --git a/distros/dataverse.no/init.d/101-header-footer-custumisation.sh b/distros/dataverse.no/init.d/101-header-footer-custumisation.sh
index 1dd49db..4fa5313 100755
--- a/distros/dataverse.no/init.d/101-header-footer-custumisation.sh
+++ b/distros/dataverse.no/init.d/101-header-footer-custumisation.sh
@@ -4,8 +4,19 @@ wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no
wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/custom-footer.html -O /tmp/custom-footer.html
curl -X PUT -d '/logos/navbar/logo.png' http://localhost:8080/api/admin/settings/:LogoCustomizationFile
-curl -X PUT -d '/tmp/custom-header.html' http://localhost:8080/api/admin/settings/:HeaderCustomizationFile
+#curl -X PUT -d '/tmp/custom-header.html' http://localhost:8080/api/admin/settings/:HeaderCustomizationFile
curl -X PUT -d '/tmp/custom-footer.html' http://localhost:8080/api/admin/settings/:FooterCustomizationFile
-curl -X PUT -d http://site.uit.no/dataverseno/deposit/ http://localhost:8080/api/admin/settings/:GuidesBaseUrl
-curl -X PUT -d '' http://localhost:8080/api/admin/settings/:GuidesVersion
+#curl -X PUT -d http://site.uit.no/dataverseno/deposit/ http://localhost:8080/api/admin/settings/:GuidesBaseUrl
+#curl -X PUT -d '' http://localhost:8080/api/admin/settings/:GuidesVersion
curl -X PUT -d https://site.uit.no/dataverseno/support/ http://localhost:8080/api/admin/settings/:NavbarSupportUrl
+curl -X PUT -d https://site.uit.no/dataverseno/about/policy-framework/access-and-use-policy/ http://localhost:8080/api/admin/settings/:ApplicationPrivacyPolicyUrl
+
+#file.dataFilesTab.metadata.header=Metadata
+
+if [ ! -z ${TESTBANNER+x} ];
+ then
+ curl -X PUT -d '/tmp/custom-header.html' http://localhost:8080/api/admin/settings/:HeaderCustomizationFile
+ else
+ curl -X PUT -d '' http://localhost:8080/api/admin/settings/:HeaderCustomizationFile
+ fi
+
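The header customisation above is applied only when TESTBANNER is defined in the container environment; the docker-compose change earlier in this patch passes the variable through from the host. A hypothetical way to switch the banner on (the .env file name is an assumption; any value works because the script only tests whether the variable is set):

    # .env next to docker-compose.yaml
    TESTBANNER=yes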
diff --git a/distros/dataverse.no/init.d/202-trigger.sh b/distros/dataverse.no/init.d/202-trigger.sh
deleted file mode 100755
index 5414825..0000000
--- a/distros/dataverse.no/init.d/202-trigger.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-python3 ${INIT_SCRIPTS_FOLDER}/affiliations/affiliation2data.py > /tmp/affiliations.sql
-export PGPASSWORD=`cat /secrets/db/password`
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/builtinuser_trigger.sql
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/builtinuser_trigger.sql.2
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/groupuser_trigger.sql
-psql -U dataverse dataverse -h postgres -f /tmp/affiliations.sql
diff --git a/distros/dataverse.no/init.d/203-counterprocessor.sh b/distros/dataverse.no/init.d/203-counterprocessor.sh
deleted file mode 100644
index 4de989f..0000000
--- a/distros/dataverse.no/init.d/203-counterprocessor.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-
-#!/bin/bash
-
-mkdir /opt/payara/counter-processor
-cd /opt/payara/counter-processor
-wget https://github.com/CDLUC3/counter-processor/archive/v${COUNTERPROSVERSION}.tar.gz -O v${COUNTERPROSVERSION}.tar.gz
-tar xvfz v${COUNTERPROSVERSION}.tar.gz
-cd /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}
-curl "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country&license_key=${GEOIPLICENSE}&suffix=tar.gz" -o GeoLite2-Country.tar.gz \
- && tar -xzvf GeoLite2-Country.tar.gz \
- && mv GeoLite2-Country_*/GeoLite2-Country.mmdb /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/maxmind_geoip
-
-wget https://guides.dataverse.org/en/latest/_downloads/a65ffc2dba9f406858591558ae92790c/setup-counter-processor.sh -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/setup-counter-processor.sh
-wget https://guides.dataverse.org/en/latest/_downloads/fb16fe67897ad9fb85ec67bce5e6b83e/counter-processor-config.yaml -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/counter-processor-config.yaml
-
-curl -X PUT -d '/opt/payara/appserver/glassfish/domains/domain1/logs/makeDataCount' http://localhost:8080/api/admin/settings/:MDCLogPath
-curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics
-
-pip3 install -r requirements.txt --ignore-installed PyYAML
-export ALLOWED_ENV=year_month
-
diff --git a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2 b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2
index cb2835b..1dc9f7b 100644
--- a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2
+++ b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2
@@ -1 +1,2 @@
CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.builtinuser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate();
+--CREATE TRIGGER affiliation_trigger_update AFTER UPDATE ON public.authenticateduser FOR EACH ROW WHEN (OLD.emailconfirmed is not null) EXECUTE PROCEDURE public.affiliationupdate();
diff --git a/distros/dataverse.no/init.d/affiliations/updatetrigger.sql b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql
index 95e6530..c58aa9e 100644
--- a/distros/dataverse.no/init.d/affiliations/updatetrigger.sql
+++ b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql
@@ -20,3 +20,4 @@ RETURN NULL;
END;
$$
CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.actionlogrecord FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate();
+DELETE from setting where name=':Shoulder';
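One way to check that the trigger was actually created is to query pg_trigger from the same postgres container, reusing the connection parameters seen in the (now removed) 202-trigger.sh; a hedged sketch:

    export PGPASSWORD=`cat /secrets/db/password`
    psql -U dataverse dataverse -h postgres \
      -c "SELECT tgname FROM pg_trigger WHERE tgrelid = 'public.actionlogrecord'::regclass;"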
From f0c6388b77770b7330874a5d5328bb74f9318fd4 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 14:40:55 +0100
Subject: [PATCH 207/354] Update README.rst
---
doc/README.rst | 24 ++++++++++++++++++++++++
1 file changed, 24 insertions(+)
diff --git a/doc/README.rst b/doc/README.rst
index 0d01996..0f6d39d 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -19,3 +19,27 @@ They are several steps needed to implementation DataverseNO Docker Container:
2) Creation of Dockerfile, and
3) Building images and development of Dockerfile.
+
+Prerequisites & Dependencies
+----------------------------
+
+- SMTP server
+
+login
+
+-Feide saml / openID
+-Azure openID
+-ORCID openID
+
+storage
+
+-blob storage (monted on the VM)
+-S3 storage
+
+
+
+Prerequisites: SSH to the working VM as the administrator and make sure that you have sudo rights to install the following:
+
+- [Docker and Docker-compose](https://https://www.docker.com/) - Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
+* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development.
++ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) - The Azure Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure and execute administrative commands on Azure resources. It allows the execution of commands through a terminal using interactive command-line prompts or a script.
From 7dd5c13c137377c1a99dff2c0f4684361f6461b5 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 14:55:13 +0100
Subject: [PATCH 208/354] Update README.rst
---
doc/README.rst | 16 +++++++++-------
1 file changed, 9 insertions(+), 7 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index 0f6d39d..5cff598 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -8,9 +8,13 @@ Docker will come to your help and will help remove these challenges because of t
This documentation will show you how to use Docker to containerize your Dataverse application so you can run it on any server regardless of the operating system. DataverseNO testing is done on Ubuntu on the Microsoft Azure Cloud.
-2. What is Docker?
------------------
-Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
+
+Prerequisites & Dependencies
+----------------------------
+
+The DataverseNO Docker Container platform stands on the shoulders of many other software projects. Before running it, the following must be installed and/or configured
+
+-[Docker and Docker-compose](https://https://www.docker.com/) - Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers. Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
They are several steps needed to implementation DataverseNO Docker Container:
@@ -20,12 +24,10 @@ They are several steps needed to implementation DataverseNO Docker Container:
3) Building images and development of Dockerfile.
-Prerequisites & Dependencies
-----------------------------
-
+login
- SMTP server
-login
+
-Feide saml / openID
-Azure openID
From 984c089577e3ee5ade3a844d7e0b56ab6d81a900 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 15:09:09 +0100
Subject: [PATCH 209/354] Update README.rst
---
doc/README.rst | 38 +++++++++++++++++---------------------
1 file changed, 17 insertions(+), 21 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index 5cff598..e1dc114 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -1,7 +1,7 @@
The following documentation will guide you through building a custom docker container with a Dataverse project on Microsoft Azure Cloud. DataverseNO uses Docker containers to manage Dataverse deployment and updates.
-1. Motivation for DataverseNO Docker Container
---------------------------------------------
+Motivation for DataverseNO Docker Container
+-------------------------------------------
Let's assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But when you deploy the same application to another server for production or demo, boom! Your application does not work anymore. Many factors can contribute to this: it could be operating system compatibility or different library versions. As a result, your application cannot be deployed, and you run into a lot of challenges.
Docker comes to your help and removes these challenges caused by incompatibility problems.
@@ -9,12 +9,21 @@ Docker will come to your help and will help remove these challenges because of t
This documentation will show you how to use Docker to containerize your Dataverse application so you can run it on any server regardless of the operating system. DataverseNO testing is done on Ubuntu on the Microsoft Azure Cloud.
-Prerequisites & Dependencies
+Prerequisites & Requirements
----------------------------
-The DataverseNO Docker Container platform stands on the shoulders of many other software projects. Before running it, the following must be installed and/or configured
+The DataverseNO Docker Container platform stands on the shoulders of many other software projects. Before running it, the following must be installed and/or configured.
--[Docker and Docker-compose](https://https://www.docker.com/) - Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers. Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
+- login
+- SMTP server
+- Feide saml / openID
+- Azure openID
+- ORCID openID
+- Storage - Cloudian
+- Blob storage (mounted on the VM)
+- S3 storage for storing dataset files in Cloudian. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud. (an object storage fo
+
+- [Docker and Docker-compose](https://https://www.docker.com/) - Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers. Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
They are several steps needed to implementation DataverseNO Docker Container:
@@ -24,24 +33,11 @@ They are several steps needed to implementation DataverseNO Docker Container:
3) Building images and development of Dockerfile.
-login
-- SMTP server
-
+* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO uses git to track the changes made to files.
++ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) - The Azure Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure and execute administrative commands on Azure resources. It allows the execution of commands through a terminal using interactive command-line prompts or a script.
--Feide saml / openID
--Azure openID
--ORCID openID
-
-storage
-
--blob storage (monted on the VM)
--S3 storage
+Prerequisites: SSH to the working VM as the administrator and make sure that you have sudo rights to install.
-
-Prerequisites: SSH to the working VM as the administrator and make sure that you have sudo rights to install the following:
-- [Docker and Docker-compose](https://https://www.docker.com/) - Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
-* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development.
-+ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) - The Azure Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure and execute administrative commands on Azure resources. It allows the execution of commands through a terminal using interactive command-line prompts or a script.
From a631552c0cd834c3902d8b53c607cae831d91721 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 15:12:37 +0100
Subject: [PATCH 210/354] Update README.rst
---
doc/README.rst | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index e1dc114..1cafe61 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -23,15 +23,15 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- Blob storage (mounted on the VM)
- S3 storage for storing dataset files in Cloudian. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud. (an object storage fo
-- [Docker and Docker-compose](https://https://www.docker.com/) - Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers. Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels.
+- [Docker and Docker-compose](https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command-line based and only starts one container at a time. Docker-compose reads configuration data from a YAML file and runs multiple containers.
-They are several steps needed to implementation DataverseNO Docker Container:
+ There are several steps needed to implement the DataverseNO Docker Container:
-1) Installation of Docker on our VM machine,
+ 1) Installation of Docker on our VM machine,
-2) Creation of Dockerfile, and
+ 2) Creation of Dockerfile, and
-3) Building images and development of Dockerfile.
+ 3) Building images and development of Dockerfile.
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO uses git to track the changes made to files.
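As a rough illustration of those three steps on the VM (the repository and compose path are taken from elsewhere in this repo; everything else is an assumption, not the project's official procedure):

    # 1) install Docker and docker-compose
    sudo apt-get update && sudo apt-get install -y docker.io docker-compose
    # 2) fetch the repository that carries the Dockerfile/compose definitions
    git clone https://github.com/DataverseNO/dataverse-docker.git
    cd dataverse-docker/distros/dataverse.no
    # 3) build and start the containers described in docker-compose.yaml
    docker-compose up -d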
From 293782da97ccc70fd304bb782c1986ea4a799e5e Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 15:21:04 +0100
Subject: [PATCH 211/354] Update README.rst
---
doc/README.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/README.rst b/doc/README.rst
index 1cafe61..fbb3123 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -18,7 +18,7 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- SMTP server
- Feide saml / openID
- Azure openID
-- ORCID openID
+- [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places.
- Storage - Cloudian
- Blob storage (monted on the VM)
- S3 storage for storng dataset files stored in Cloudian. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud. (an object storage fo
From 1f5163af01912c33331440589adc31ac9062d932 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 15:38:12 +0100
Subject: [PATCH 212/354] Update README.rst
---
doc/README.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/README.rst b/doc/README.rst
index fbb3123..31eb223 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -18,7 +18,7 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- SMTP server
- Feide saml / openID
- Azure openID
-- [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places.
+- [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places.
- Storage - Cloudian
- Blob storage (monted on the VM)
- S3 storage for storng dataset files stored in Cloudian. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud. (an object storage fo
From 39a0224e5b78e7e7441aa215be983b998b9f317a Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 15:39:29 +0100
Subject: [PATCH 213/354] Update installation.rst
---
doc/installation.rst | 24 ------------------------
1 file changed, 24 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 35a6381..150b2c7 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -1,30 +1,6 @@
Dataverse installation on Microsoft Azure
=========================================
-Dependencies
-------------
-
-- SMTP server
-
-login
-
--Feide saml / openID
--Azure openID
--ORCID openID
-
-storage
-
--blob storage (monted on the VM)
--S3 storage
-
-
-
-Prerequisites: SSH to the working VM as the administrator and make sure that you have sudo rights to install the following:
-
-- [Docker and Docker-compose](https://https://www.docker.com/) - Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
-* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development.
-+ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) - The Azure Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure and execute administrative commands on Azure resources. It allows the execution of commands through a terminal using interactive command-line prompts or a script.
-
Installation of docker, docker-compose, git and, azure-cli
----------------------------------------------------------
From 8301fb0d9f3ad6d21963ccad4a5d4d2ea8d36f55 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 15:40:30 +0100
Subject: [PATCH 214/354] Update README.rst
---
doc/README.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/README.rst b/doc/README.rst
index 31eb223..5d3d357 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -9,7 +9,7 @@ Docker will come to your help and will help remove these challenges because of t
This documentation will show you how to use Docker to containerize your Dataverse application so you can run them on any servers regardless of their operating system inside of them. DataverseNO test is done on Ubuntu OS on the Microsoft Azure Cloud.
-Prerequisites & Requirements
+Requirements & Prerequisites
----------------------------
The DataverseNO Docker Container platform stands on the shoulders of many other software projects. Before running it, the following must be installed and/or configured
From 5460f531bc3a376b1d2e13d6e5215b60b7d33151 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 15:57:43 +0100
Subject: [PATCH 215/354] Update README.rst
---
doc/README.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index 5d3d357..f5af9bd 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -21,13 +21,13 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places.
- Storage - Cloudian
- Blob storage (monted on the VM)
-- S3 storage for storng dataset files stored in Cloudian. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud. (an object storage fo
+- S3 storage for storng dataset files stored in Cloudian. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
They are several steps needed to implementation DataverseNO Docker Container:
- 1) Installation of Docker on our VM machine,
+ 1) Installation of Docker on a VM machine,
2) Creation of Dockerfile, and
From 266a3d6b0471194faa99f7bd43cdb2368149115e Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 16:19:37 +0100
Subject: [PATCH 216/354] Update README.rst
---
doc/README.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/README.rst b/doc/README.rst
index f5af9bd..6dc2c05 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -20,7 +20,7 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- Azure openID
- [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places.
- Storage - Cloudian
-- Blob storage (monted on the VM)
+- Blob storage (mounted on the VM)
- S3 storage for storng dataset files stored in Cloudian. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
From 44fc4c930d2f17bbe3de0fc0eb4362e090942f18 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Wed, 15 Feb 2023 16:23:40 +0100
Subject: [PATCH 217/354] Update README.rst
---
doc/README.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/README.rst b/doc/README.rst
index 6dc2c05..4a06613 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -33,7 +33,7 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
3) Building images and development of Dockerfile.
-* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO users git to track the changes made on files
+* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO uses Git to track the changes made on files. You can create a new project/repo, or a repo from an existing project, if this has not already been done.
+ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) - The Azure Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure and execute administrative commands on Azure resources. It allows the execution of commands through a terminal using interactive command-line prompts or a script.
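As a rough sketch of the Git workflow referred to above — creating a new repository or tracking an existing one — something like the following could be used (the repository URL and branch are taken from this project; the new-project name is only an example):

.. code-block:: bash

   # Start a brand-new repository for local configuration (name is an example)
   git init my-dataverse-config
   cd my-dataverse-config
   git add . && git commit -m "Initial commit"

   # Or track the existing DataverseNO project by cloning it
   git clone https://github.com/DataverseNO/dataverse-docker.git
   cd dataverse-docker
   git checkout dataverse.no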
From 985e86101d3cbc2755de463d715784d4a4d90a2b Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 15 Feb 2023 17:35:02 +0000
Subject: [PATCH 218/354] fixed login via Shibboleth sometimes not working
---
distros/dataverse.no/docker-compose.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 61f2cea..1f48ee7 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -66,7 +66,7 @@ services:
- "443:9443"
volumes:
- ${LOGS_PATH}/shibboleth/httpd:/var/log/httpd
- - ${LOGS_PATH}/shibboleth/shibboleth:/var/log/shibboleth
+ # - ${LOGS_PATH}/shibboleth/shibboleth:/var/log/shibboleth
- ${CONFIGURATION_PATH}/shibboleth:/etc/shibboleth
- ./configs/http-ssl.conf:/etc/httpd/conf.d/ssl.conf
- ${CONFIGURATION_PATH}/configuration/files/localhost.pem:/etc/pki/tls/certs/localhost.crt
From 8c2b3aa1f5d9c9c4f853c678f2190a963eee6391 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 16 Feb 2023 08:50:02 +0000
Subject: [PATCH 219/354] removed $hostname for shibboleth due to it creating a
 conflict when starting with "dataverse."
---
distros/dataverse.no/docker-compose.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 1f48ee7..b21890b 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -71,7 +71,7 @@ services:
- ./configs/http-ssl.conf:/etc/httpd/conf.d/ssl.conf
- ${CONFIGURATION_PATH}/configuration/files/localhost.pem:/etc/pki/tls/certs/localhost.crt
- ${CONFIGURATION_PATH}/configuration/files/localhost.key:/etc/pki/tls/private/localhost.key
- hostname: ${hostname}
+ # hostname: ${hostname}
labels:
- "traefik.enable=true"
- traefik.http.routers.shibboleth-web.rule=Host(`${traefikhost}`) && PathPrefix(`/Shibboleth.sso`)
From 9c5639a056e54877821998e8fbc6e6a7089780be Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 16 Feb 2023 10:29:28 +0000
Subject: [PATCH 220/354] updated to internal dvwebloader for 5.13
---
.../dataverse.no/init.d/055-dvwebloader.sh | 37 +------------------
1 file changed, 2 insertions(+), 35 deletions(-)
diff --git a/distros/dataverse.no/init.d/055-dvwebloader.sh b/distros/dataverse.no/init.d/055-dvwebloader.sh
index bf2f57f..1bf8d3d 100755
--- a/distros/dataverse.no/init.d/055-dvwebloader.sh
+++ b/distros/dataverse.no/init.d/055-dvwebloader.sh
@@ -1,37 +1,4 @@
#!/bin/bash
+curl -X PUT -d 'native/http,dcm/rsync+ssh,dvwebloader' http://localhost:8080/api/admin/settings/:UploadMethods
+curl -X PUT -d 'https://gdcc.github.io/dvwebloader/src/dvwebloader.html' http://localhost:8080/api/admin/settings/:WebloaderUrl
-export PGPASSWORD=`cat /secrets/db/password`
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/webloader_check.sql -o /tmp/output
-EXIST=`grep Dataverse /tmp/output`
-
-wget https://github.com/DataverseNO/dvwebloader/archive/refs/heads/main.zip -O /tmp/dvwebloader.zip
-unzip -o /tmp/dvwebloader.zip -d $DOCROOT_DIR/logos
-
-if [[ -z $EXIST ]]; then
-echo "Loaded"
-curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
-"{
- \"displayName\": \"Dataverse WebLoader\",
- \"description\": \"Upload all the files in a local directory!\",
- \"toolName\": \"dvwebloader\",
- \"scope\": \"dataset\",
- \"contentType\":\"text/plain\",
- \"types\": [
- \"explore\"
- ],
- \"toolUrl\": \"https://${hostname}/logos/dvwebloader-main/src/dvwebloader.html\",
- \"toolParameters\": {
- \"queryParameters\": [
- {
- \"siteUrl\": \"{siteUrl}\"
- },
- {
- \"datasetPid\": \"{datasetPid}\"
- },
- {
- \"key\": \"{apiToken}\"
- }
- ]
- }
-}"
-fi
From 384f4c360ea9d1ceebddbd56684fff9574738343 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Fri, 17 Feb 2023 15:55:46 +0100
Subject: [PATCH 221/354] Update README.rst
---
doc/README.rst | 15 ++++++++-------
1 file changed, 8 insertions(+), 7 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index 4a06613..060a5bd 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -3,10 +3,10 @@ The following documentation will guide you through building a custom docker cont
Motivation for DataverseNO Docker Container
-------------------------------------------
-Let assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But when you have deployed the same application into a another server for production or demo, boom! Your application does not work anymore. Many factors can contribute to make this happen. It could be the operating system compatibility or different library versions. Therefore, your application could not be deployed, and you will get a lot of challenges.
+Let's assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But when you deploy the same application onto another server for production or demo, boom! Your application does not work anymore. Many factors can contribute to making this happen. It could be operating system compatibility or different library versions. As a result, your application cannot be deployed successfully, and you will face a lot of challenges.
Docker will come to your help and will help remove these challenges because of the incompatibility problems.
-This documentation will show you how to use Docker to containerize your Dataverse application so you can run them on any servers regardless of their operating system inside of them. DataverseNO test is done on Ubuntu OS on the Microsoft Azure Cloud.
+This documentation will show you how to use Docker to containerize your Dataverse application so you can run it on any server regardless of the operating system inside it. The DataverseNO test was done on Ubuntu OS on the Microsoft Azure Cloud.
Requirements & Prerequisites
@@ -14,11 +14,12 @@ Requirements & Prerequisites
The DataverseNO Docker Container platform stands on the shoulders of many other software projects. Before running it, the following must be installed and/or configured
-- login
-- SMTP server
-- Feide saml / openID
-- Azure openID
-- [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places.
+- login to DataverseNO:
+
+ - [FEIDE SAML / OpenID](https://www.feide.no) - FEIDE is a Norwegian government solution for secure identification in the education sector. We use FEIDE SAML as our identity management and single sign-on (SSO) solution; it enables SSO for our Dataverse application.
+ - Azure OpenID - OpenID Connect is a security-token based extension of the OAuth 2.0 authorization protocol to do single sign-on. Azure Active Directory provides an implementation of OpenID Connect (OIDC) protocol and Sysdig supports it for single sign-on and API access to Sysdig application.
+ - [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places. Authentication with ORCID is supported in Dataverse. Registration for the production Members API service is open to ORCID members organizations only. UiT is an organisation member and registered with ORCID.
+- SMTP server - is used to send, and relay outgoing email between DataverseNO (sender) and receivers. We use UiT smtp server.
- Storage - Cloudian
- Blob storage (mounted on the VM)
- S3 storage for storng dataset files stored in Cloudian. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
From d7b36bd44d54e7fce37315766b332b8e51446059 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Fri, 17 Feb 2023 16:16:11 +0100
Subject: [PATCH 222/354] Update README.rst
---
doc/README.rst | 15 ++++++++-------
1 file changed, 8 insertions(+), 7 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index 060a5bd..e533ec1 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -14,19 +14,20 @@ Requirements & Prerequisites
The DataverseNO Docker Container platform stands on the shoulders of many other software projects. Before running it, the following must be installed and/or configured
-- login to DataverseNO:
+- Login to DataverseNO:
 - [FEIDE SAML / OpenID](https://www.feide.no) - FEIDE is a Norwegian government solution for secure identification in the education sector. We use FEIDE SAML as our identity management and single sign-on (SSO) solution; it enables SSO for our Dataverse application.
- Azure OpenID - OpenID Connect is a security-token based extension of the OAuth 2.0 authorization protocol to do single sign-on. Azure Active Directory provides an implementation of OpenID Connect (OIDC) protocol and Sysdig supports it for single sign-on and API access to Sysdig application.
- [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places. Authentication with ORCID is supported in Dataverse. Registration for the production Members API service is open to ORCID members organizations only. UiT is an organisation member and registered with ORCID.
+ - [eduGAIN](https://edugain.org) - The eduGAIN interfederation service connects identity federations around the world, simplifying access to content, services and resources for the global research and education community.
- SMTP server - is used to send, and relay outgoing email between DataverseNO (sender) and receivers. We use UiT smtp server.
-- Storage - Cloudian
-- Blob storage (mounted on the VM)
-- S3 storage for storng dataset files stored in Cloudian. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
+- Storage
+ - VM server - Stores the Dataverse application, custom scripts and configuration files.
+ - Cloudian S3 storage - Used for storing dataset files. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
+ - Blob storage (mounted on the VM)
+ - [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
-- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
-
- They are several steps needed to implementation DataverseNO Docker Container:
+They are several steps needed to implementation DataverseNO Docker Container:
1) Installation of Docker on a VM machine,
From 224c4b7e8694b584eb0a539786dd2e714e0c4680 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Fri, 17 Feb 2023 16:20:47 +0100
Subject: [PATCH 223/354] Update README.rst
---
doc/README.rst | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index e533ec1..3c7f8c5 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -21,19 +21,20 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places. Authentication with ORCID is supported in Dataverse. Registration for the production Members API service is open to ORCID members organizations only. UiT is an organisation member and registered with ORCID.
- [eduGAIN](https://edugain.org) - The eduGAIN interfederation service connects identity federations around the world, simplifying access to content, services and resources for the global research and education community.
- SMTP server - is used to send, and relay outgoing email between DataverseNO (sender) and receivers. We use UiT smtp server.
+
- Storage
- VM server - Stores the Dataverse application, custom scripts and configuration files.
 - Cloudian S3 storage - Used for storing dataset files. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
- Blob storage (mounted on the VM)
- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
-They are several steps needed to implementation DataverseNO Docker Container:
+ Steps needed to implementation of DataverseNO Docker Container:
- 1) Installation of Docker on a VM machine,
+ 1) Installation of Docker on a VM machine,
- 2) Creation of Dockerfile, and
+ 2) Creation of Dockerfile, and
- 3) Building images and development of Dockerfile.
+ 3) Building images and development of Dockerfile.
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO uses Git to track the changes made on files. You can create a new project/repo, or a repo from an existing project, if this has not already been done.
From 23d1142f8cc7498f14f5bcf97dba3021d6fc0555 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Fri, 17 Feb 2023 16:22:13 +0100
Subject: [PATCH 224/354] Update README.rst
---
doc/README.rst | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index 3c7f8c5..d6520b6 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -28,13 +28,13 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- Blob storage (mounted on the VM)
- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
- Steps needed to implementation of DataverseNO Docker Container:
+ Steps needed to implementation of DataverseNO Docker Container:
- 1) Installation of Docker on a VM machine,
+ 1) Installation of Docker on a VM machine,
- 2) Creation of Dockerfile, and
+ 2) Creation of Dockerfile, and
- 3) Building images and development of Dockerfile.
+ 3) Building images and development of Dockerfile.
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO uses Git to track the changes made on files. You can create a new project/repo, or a repo from an existing project, if this has not already been done.
From aa49c7e105108f891b5538b004ca31157c2ef396 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Fri, 17 Feb 2023 16:24:51 +0100
Subject: [PATCH 225/354] Update README.rst
---
doc/README.rst | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index d6520b6..f765f0f 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -26,15 +26,15 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- VM server - Stores the Dataverse application, custom scripts and configuration files.
 - Cloudian S3 storage - Used for storing dataset files. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
- Blob storage (mounted on the VM)
- - [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
+- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
- Steps needed to implementation of DataverseNO Docker Container:
+Steps needed to implementation of DataverseNO Docker Container:
- 1) Installation of Docker on a VM machine,
+ 1) Installation of Docker on a VM machine,
- 2) Creation of Dockerfile, and
+ 2) Creation of Dockerfile, and
- 3) Building images and development of Dockerfile.
+ 3) Building images and development of Dockerfile.
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO uses Git to track the changes made on files. You can create a new project/repo, or a repo from an existing project, if this has not already been done.
From 246d93c5b7ac3068164ef00a0f39adb1f91d5018 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Fri, 17 Feb 2023 16:25:59 +0100
Subject: [PATCH 226/354] Update README.rst
---
doc/README.rst | 9 +++------
1 file changed, 3 insertions(+), 6 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index f765f0f..c045442 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -29,12 +29,9 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
Steps needed to implementation of DataverseNO Docker Container:
-
- 1) Installation of Docker on a VM machine,
-
- 2) Creation of Dockerfile, and
-
- 3) Building images and development of Dockerfile.
+ 1) Installation of Docker on a VM machine,
+ 2) Creation of Dockerfile, and
+ 3) Building images and development of Dockerfile.
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO uses Git to track the changes made on files. You can create a new project/repo, or a repo from an existing project, if this has not already been done.
From 7d48b0a907f626991c2bdfee3fa26b43f47cdbff Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Fri, 17 Feb 2023 16:26:45 +0100
Subject: [PATCH 227/354] Update README.rst
---
doc/README.rst | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index c045442..cd4d720 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -26,9 +26,7 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- VM server - Stores the Dataverse application, custom scripts and configuration files.
 - Cloudian S3 storage - Used for storing dataset files. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
- Blob storage (mounted on the VM)
-- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
-
-Steps needed to implementation of DataverseNO Docker Container:
+- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers. Steps needed to implementation of DataverseNO Docker Container:
1) Installation of Docker on a VM machine,
2) Creation of Dockerfile, and
3) Building images and development of Dockerfile.
From 080fa1b967579cb9ce9eba61145d796242c2491e Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Fri, 17 Feb 2023 16:37:18 +0100
Subject: [PATCH 228/354] Update README.rst
---
doc/README.rst | 16 +++++++++-------
1 file changed, 9 insertions(+), 7 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index cd4d720..531d36f 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -12,21 +12,28 @@ This documentation will show you how to use Docker to containerize your Datavers
Requirements & Prerequisites
----------------------------
+SSH to a working VM as the administrator and make sure that you have sudo rights to install.
+
The DataverseNO Docker Container platform stands on the shoulders of many other software projects. Before running it, the following must be installed and/or configured
- Login to DataverseNO:
 - [FEIDE SAML / OpenID](https://www.feide.no) - FEIDE is a Norwegian government solution for secure identification in the education sector. We use FEIDE SAML as our identity management and single sign-on (SSO) solution; it enables SSO for our Dataverse application.
- - Azure OpenID - OpenID Connect is a security-token based extension of the OAuth 2.0 authorization protocol to do single sign-on. Azure Active Directory provides an implementation of OpenID Connect (OIDC) protocol and Sysdig supports it for single sign-on and API access to Sysdig application.
+ - Azure OpenID - OpenID Connect is a security-token based extension of the OAuth 2.0 authorization protocol to do single sign-on. Azure OpenID supports single sign-on and API access to Dataverse application.
- [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places. Authentication with ORCID is supported in Dataverse. Registration for the production Members API service is open to ORCID members organizations only. UiT is an organisation member and registered with ORCID.
- [eduGAIN](https://edugain.org) - The eduGAIN interfederation service connects identity federations around the world, simplifying access to content, services and resources for the global research and education community.
- SMTP server - is used to send, and relay outgoing email between DataverseNO (sender) and receivers. We use UiT smtp server.
- Storage
+
- VM server - Stores the Dataverse application, custom scripts and configuration files.
 - Cloudian S3 storage - Used for storing dataset files. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
- Blob storage (mounted on the VM)
-- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers. Steps needed to implementation of DataverseNO Docker Container:
+
+- [Docker and Docker-compose](https://https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based, and only start one container at a time. Docker-compose reads configuration data from a YAML file and run multiple containers.
+
+Steps needed for the implementation of the DataverseNO Docker Container:
+
1) Installation of Docker on a VM machine,
2) Creation of Dockerfile, and
3) Building images and development of Dockerfile.
@@ -34,8 +41,3 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO uses Git to track the changes made on files. You can create a new project/repo, or a repo from an existing project, if this has not already been done.
+ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) - The Azure Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure and execute administrative commands on Azure resources. It allows the execution of commands through a terminal using interactive command-line prompts or a script.
-
-Prerequisites: SSH to the working VM as the administrator and make sure that you have sudo rights to install.
-
-
-
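To make the difference between plain ``docker run`` and Docker Compose described above concrete, here is a minimal, illustrative sketch (the image tag and the compose directory path are examples, not taken from this setup):

.. code-block:: bash

   # docker run starts exactly one container from the command line
   docker run -d --name solr-test -p 8983:8983 solr:8.11

   # docker-compose reads docker-compose.yaml and starts all defined services together
   cd /distrib/dataverse-docker/distros/dataverse.no   # example path to the compose file
   docker-compose up -d
   docker-compose ps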
From df1901d695d0013fabbcbd685d137d6f2cb21041 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Mon, 20 Feb 2023 11:32:12 +0100
Subject: [PATCH 229/354] Update installation.rst
---
doc/installation.rst | 39 +++++++++++++++++++++++++++++++++++++++
1 file changed, 39 insertions(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index 150b2c7..c3bf935 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -4,6 +4,14 @@ Dataverse installation on Microsoft Azure
Installation of docker, docker-compose, git and, azure-cli
----------------------------------------------------------
+Update APT sources
+------------------
+
+This needs to be done in order to access packages from the Docker repository.
+1. Log into your VM machine as a user with sudo or root privileges.
+2. Open a terminal window.
+3. Update package information, ensure that APT works with the https method, and that CA certificates are installed.
+
.. code-block:: bash
sudo su
@@ -14,13 +22,44 @@ Installation of docker, docker-compose, git and, azure-cli
azure-cli \
gnupg \
lsb-release
+
+4. Add Docker’s official GPG key:
+
+.. code-block:: bash
+
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+Verify that the key fingerprint is, for example, 9DC8 5822 9FC7 DD38 854A E2D8 8D81 803C 0EBF CD88.
+
+.. code-block:: bash
+
+sudo apt-key fingerprint 0EBFCD88
+
+5. Find the entry in the table below which corresponds to your Ubuntu version. This determines
+where APT will search for Docker packages.
+
+Run the following command, substituting the entry for your operating system for the placeholder .
+
+.. code-block:: bash
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
+7. Update the APT package index by executing sudo apt-get update.
+
+.. code-block:: bash
apt-get update
+
+ 8. Verify that APT is pulling from the right repository. The version currently installed is marked with ***.
+
+ .. code-block:: bash
+
+ apt-cache policy docker-engine
+
+ 9. Install Docker Community Edition and git
+
+ .. code-block:: bash
+
apt-get install -y docker-ce docker-ce-cli containerd.io
curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
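Once the steps above have completed, a quick sanity check of the installed tooling might look like this (version output will vary):

.. code-block:: bash

   docker --version
   docker-compose --version
   git --version
   az --version
   systemctl status docker --no-pager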
From 5a66cdf459e66f7e31fd9b89a29f2137584c4484 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Mon, 20 Feb 2023 11:34:33 +0100
Subject: [PATCH 230/354] Update installation.rst
---
doc/installation.rst | 3 +++
1 file changed, 3 insertions(+)
diff --git a/doc/installation.rst b/doc/installation.rst
index c3bf935..8c7b490 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -8,8 +8,11 @@ Update APT sources
------------------
This needs to be done in order to access packages from the Docker repository.
+
1. Log into your VM machine as a user with sudo or root privileges.
+
2. Open a terminal window.
+
3. Update package information, ensure that APT works with the https method, and that CA certificates are installed.
.. code-block:: bash
From c0bd8bfdbe70b0d4cf104bc152d04cf80b2a32ae Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Mon, 20 Feb 2023 11:41:28 +0100
Subject: [PATCH 231/354] Update installation.rst
---
doc/installation.rst | 16 +++++++++-------
1 file changed, 9 insertions(+), 7 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 8c7b490..4c81c04 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -44,24 +44,26 @@ where APT will search for Docker packages.
Run the following command, substituting the entry for your operating system for the placeholder .
.. code-block:: bash
+
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
-7. Update the APT package index by executing sudo apt-get update.
+6. Update the APT package index by executing sudo apt-get update.
.. code-block:: bash
+
apt-get update
- 8. Verify that APT is pulling from the right repository. The version currently installed is marked with ***.
-
- .. code-block:: bash
+ 7. Verify that APT is pulling from the right repository. The version currently installed is marked with ***.
- apt-cache policy docker-engine
+.. code-block:: bash
+
+ apt-cache policy docker-engine
- 9. Install Docker Community Edition and git
+ 8. Install Docker Community Edition and git
- .. code-block:: bash
+.. code-block:: bash
apt-get install -y docker-ce docker-ce-cli containerd.io
curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
From 3d4dcc2e27ff46504c18e8d26700476482829747 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Mon, 20 Feb 2023 11:44:42 +0100
Subject: [PATCH 232/354] Update installation.rst
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 4c81c04..e14d120 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -55,13 +55,13 @@ Run the following command, substituting the entry for your operating system for
apt-get update
- 7. Verify that APT is pulling from the right repository. The version currently installed is marked with ***.
+7. Verify that APT is pulling from the right repository. The version currently installed is marked with ***.
.. code-block:: bash
apt-cache policy docker-engine
- 8. Install Docker Community Edition and git
+8. Install Docker Community Edition and git
.. code-block:: bash
From bb4fb04310cfd16b560e33f7d9bac1ea89dd4ad0 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 21 Feb 2023 08:21:14 +0000
Subject: [PATCH 233/354] update to 5.13
---
distros/dataverse.no/configs/schema.xml.5.13 | 1554 +++++++++++++++++
distros/dataverse.no/docker-compose.yaml | 4 +-
.../dataverse.no/init.d/055-dvwebloader.sh | 2 +-
3 files changed, 1558 insertions(+), 2 deletions(-)
create mode 100644 distros/dataverse.no/configs/schema.xml.5.13
diff --git a/distros/dataverse.no/configs/schema.xml.5.13 b/distros/dataverse.no/configs/schema.xml.5.13
new file mode 100644
index 0000000..f119386
--- /dev/null
+++ b/distros/dataverse.no/configs/schema.xml.5.13
@@ -0,0 +1,1554 @@
+ [schema.xml.5.13 — the Solr schema shipped with Dataverse 5.13 (1,554 lines of XML); the element markup was not preserved in this extract, so the file content is omitted here.]
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index b21890b..6c40ad2 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -95,7 +95,7 @@ services:
- "SOLR_OPTS=-Dlog4j2.formatMsgNoLookups=true"
volumes:
- solr-data:/var/solr/data
- - ./configs/schema.xml:/var/solr/data/collection1/conf/schema.xml
+ - ./configs/schema.xml.5.13:/var/solr/data/collection1/conf/schema.xml
labels:
- "traefik.enable=true"
- "traefik.http.routers.solr.rule=Host(`solr.${traefikhost}`)"
@@ -225,6 +225,8 @@ services:
- ${DOCROOT}/docroot:/opt/payara/docroot
- ./configs/domain.xml:/opt/payara/domain.xml
- ./init.d:/opt/payara/init.d
+ - ./configs/microprofile-config.properties:/opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/META-INF/microprofile-config.properties
+
# - /distrib/private/secrets/init_2_conf_payara.sh:/opt/payara/scripts/init_2_conf_payara.sh
- /mnt:/mnt
labels:
diff --git a/distros/dataverse.no/init.d/055-dvwebloader.sh b/distros/dataverse.no/init.d/055-dvwebloader.sh
index 1bf8d3d..a7da41a 100755
--- a/distros/dataverse.no/init.d/055-dvwebloader.sh
+++ b/distros/dataverse.no/init.d/055-dvwebloader.sh
@@ -1,4 +1,4 @@
#!/bin/bash
-curl -X PUT -d 'native/http,dcm/rsync+ssh,dvwebloader' http://localhost:8080/api/admin/settings/:UploadMethods
+curl -X PUT -d 'native/http,dvwebloader' http://localhost:8080/api/admin/settings/:UploadMethods
curl -X PUT -d 'https://gdcc.github.io/dvwebloader/src/dvwebloader.html' http://localhost:8080/api/admin/settings/:WebloaderUrl
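To check that the two settings written by 055-dvwebloader.sh are actually in place, the admin settings list can be queried and filtered; a small sketch (run from the application container, or any host that can reach port 8080):

.. code-block:: bash

   curl -s http://localhost:8080/api/admin/settings | grep -E 'UploadMethods|WebloaderUrl'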
From 7ce67de3dd7cc2c1a0cc7a5c880919ca5e516e45 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 21 Feb 2023 09:23:21 +0000
Subject: [PATCH 234/354]
distros/dataverse.no/configs/microprofile-config.properties
---
.../configs/microprofile-config.properties | 42 +++++++++++++++++++
1 file changed, 42 insertions(+)
create mode 100644 distros/dataverse.no/configs/microprofile-config.properties
diff --git a/distros/dataverse.no/configs/microprofile-config.properties b/distros/dataverse.no/configs/microprofile-config.properties
new file mode 100644
index 0000000..8c0b5a8
--- /dev/null
+++ b/distros/dataverse.no/configs/microprofile-config.properties
@@ -0,0 +1,42 @@
+# GENERAL
+# Will be replaced by Maven property in /target via filtering (see )
+dataverse.version=5.13
+dataverse.build=
+
+# Default only for containers! (keep mimicking the current behaviour -
+# changing that is part of https://github.com/IQSS/dataverse/issues/6636)
+%ct.dataverse.fqdn=${dataverse.fqdn}
+%ct.dataverse.siteUrl=https://${dataverse.fqdn}
+
+# FILES
+dataverse.files.directory=/tmp/dataverse
+
+# SEARCH INDEX
+dataverse.solr.host=localhost
+# Activating mp config profile -Dmp.config.profile=ct changes default to "solr" as DNS name
+%ct.dataverse.solr.host=solr
+dataverse.solr.port=8983
+dataverse.solr.protocol=http
+dataverse.solr.core=collection1
+dataverse.solr.path=/solr/${dataverse.solr.core}
+
+# DATABASE
+dataverse.db.host=localhost
+dataverse.db.port=5432
+dataverse.db.user=dataverse
+dataverse.db.name=dataverse
+
+# RSERVE
+dataverse.rserve.host=localhost
+dataverse.rserve.port=6311
+dataverse.rserve.user=rserve
+dataverse.rserve.password=rserve
+dataverse.rserve.tempdir=/tmp/Rserv
+
+# OAI SERVER
+dataverse.oai.server.maxidentifiers=100
+dataverse.oai.server.maxrecords=10
+dataverse.oai.server.maxsets=100
+# the OAI repository name, as shown by the Identify verb,
+# can be customized via the setting below:
+#dataverse.oai.server.repositoryname=
From f390711eb8117e0debaf0f45ae3710c59f77dc82 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 21 Feb 2023 09:27:22 +0000
Subject: [PATCH 235/354] updated microprofile with env variable
---
distros/dataverse.no/configs/microprofile-config.properties | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/configs/microprofile-config.properties b/distros/dataverse.no/configs/microprofile-config.properties
index 8c0b5a8..7259ea3 100644
--- a/distros/dataverse.no/configs/microprofile-config.properties
+++ b/distros/dataverse.no/configs/microprofile-config.properties
@@ -6,7 +6,7 @@ dataverse.build=
# Default only for containers! (keep mimicking the current behaviour -
# changing that is part of https://github.com/IQSS/dataverse/issues/6636)
%ct.dataverse.fqdn=${dataverse.fqdn}
-%ct.dataverse.siteUrl=https://${dataverse.fqdn}
+%ct.dataverse.siteUrl=${dataverse.siteUrl}
# FILES
dataverse.files.directory=/tmp/dataverse
From b57a61357f0bbd8a32bfdeb32001340f04a14d59 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 21 Feb 2023 10:02:11 +0000
Subject: [PATCH 236/354] updated microprofile to default
---
.../configs/microprofile-config.properties | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/distros/dataverse.no/configs/microprofile-config.properties b/distros/dataverse.no/configs/microprofile-config.properties
index 7259ea3..2ba9969 100644
--- a/distros/dataverse.no/configs/microprofile-config.properties
+++ b/distros/dataverse.no/configs/microprofile-config.properties
@@ -1,12 +1,12 @@
# GENERAL
# Will be replaced by Maven property in /target via filtering (see )
-dataverse.version=5.13
-dataverse.build=
+#dataverse.version=5.13
+#dataverse.build=
# Default only for containers! (keep mimicking the current behaviour -
# changing that is part of https://github.com/IQSS/dataverse/issues/6636)
-%ct.dataverse.fqdn=${dataverse.fqdn}
-%ct.dataverse.siteUrl=${dataverse.siteUrl}
+#%ct.dataverse.fqdn=localhost
+#%ct.dataverse.siteUrl=http://${dataverse.fqdn}:8080
# FILES
dataverse.files.directory=/tmp/dataverse
@@ -21,10 +21,10 @@ dataverse.solr.core=collection1
dataverse.solr.path=/solr/${dataverse.solr.core}
# DATABASE
-dataverse.db.host=localhost
-dataverse.db.port=5432
-dataverse.db.user=dataverse
-dataverse.db.name=dataverse
+#dataverse.db.host=localhost
+#dataverse.db.port=5432
+#dataverse.db.user=dataverse
+#dataverse.db.name=dataverse
# RSERVE
dataverse.rserve.host=localhost
From 0fa0486873b458ba1459e320d6bdc8f96b603c99 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 21 Feb 2023 10:06:17 +0000
Subject: [PATCH 237/354] automatically replace microprofile
---
distros/dataverse.no/init.d/211-microprofile.sh | 4 ++++
1 file changed, 4 insertions(+)
create mode 100644 distros/dataverse.no/init.d/211-microprofile.sh
diff --git a/distros/dataverse.no/init.d/211-microprofile.sh b/distros/dataverse.no/init.d/211-microprofile.sh
new file mode 100644
index 0000000..4bf9373
--- /dev/null
+++ b/distros/dataverse.no/init.d/211-microprofile.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/configs/microprofile-config.properties -O /tmp/microprofile-config.properties
+cp /tmp/microprofile-config.properties /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/META-INF/microprofile-config.properties
+touch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/.reload
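A possible way to run this init script by hand inside the running application container (the container name ``dataverse`` is an assumption based on this compose setup, not confirmed by the patch):

.. code-block:: bash

   docker exec dataverse bash /opt/payara/init.d/211-microprofile.sh
   docker logs --tail 50 dataverse   # watch for the application reload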
From 4b06e698297ba6e1bac71ff42a332b62889541f1 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Wed, 22 Feb 2023 09:27:14 +0100
Subject: [PATCH 238/354] removed superfluous And
---
doc/maintenance.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/maintenance.rst b/doc/maintenance.rst
index 4091cc6..b15940d 100644
--- a/doc/maintenance.rst
+++ b/doc/maintenance.rst
@@ -149,7 +149,7 @@ in the following exemple = S3 and = 2002-gre
.. code-block:: sql
- UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE and o.dtype = 'DataFile' AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
+ UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%');
example to update for a specific owner:
From 7b09ec21addb7df3694a2917f5681c7e20aa2138 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 24 Feb 2023 14:51:25 +0000
Subject: [PATCH 239/354] backup
---
distros/dataverse.no/docker-compose.yaml | 2 ++
distros/dataverse.no/init.d/cronjob/backupfiles.sh | 4 ++++
distros/dataverse.no/init.d/cronjob/backupfiles.sql | 5 +++++
3 files changed, 11 insertions(+)
create mode 100644 distros/dataverse.no/init.d/cronjob/backupfiles.sh
create mode 100644 distros/dataverse.no/init.d/cronjob/backupfiles.sql
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 6c40ad2..089b1d9 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -95,6 +95,7 @@ services:
- "SOLR_OPTS=-Dlog4j2.formatMsgNoLookups=true"
volumes:
- solr-data:/var/solr/data
+ # - ./configs/schema.xml:/var/solr/data/collection1/conf/schema.xml
- ./configs/schema.xml.5.13:/var/solr/data/collection1/conf/schema.xml
labels:
- "traefik.enable=true"
@@ -156,6 +157,7 @@ services:
- "baseurlstring"
- "aws_bucket_name"
- "aws_s3_profile"
+ - "aws_endpoint"
- "aws_endpoint_url"
- "aws_uit_bucket_name"
- "aws_uit_s3_profile"
diff --git a/distros/dataverse.no/init.d/cronjob/backupfiles.sh b/distros/dataverse.no/init.d/cronjob/backupfiles.sh
new file mode 100644
index 0000000..f9fa557
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/backupfiles.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+export PGPASSWORD=`cat /secrets/db/password`
+psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/backupfiles.sql | grep S3
+#select storageidentifier from dvobject where modificationtime>='2023-02-02';
diff --git a/distros/dataverse.no/init.d/cronjob/backupfiles.sql b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
new file mode 100644
index 0000000..760180a
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
@@ -0,0 +1,5 @@
+select storageidentifier from dvobject where modificationtime>='2022-12-05';
+
+
+
+#10.
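The compose file only forwards variables that are already defined in the environment, so the new "aws_endpoint" entry needs a matching definition alongside the existing S3 settings. A sketch of the corresponding .env entries; every value below is a placeholder except the bucket name, which follows the one used elsewhere in this series:

    # Hypothetical .env entries for the variables forwarded in docker-compose.yaml
    aws_endpoint=s3.example.no                          # placeholder endpoint host
    aws_endpoint_url=https://s3.example.no              # placeholder endpoint URL
    aws_bucket_name=2002-green-dataversenotest1
    aws_s3_profile=default                              # placeholder profile name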
From 016226f30fb3939deb315f8d4076cc13efd8e319 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Mon, 27 Feb 2023 12:08:18 +0100
Subject: [PATCH 240/354] Update README.rst
---
doc/README.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/README.rst b/doc/README.rst
index 531d36f..b0bea1e 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -16,7 +16,7 @@ SSH to a working VM as the administrator and make sure that you have sudo rights
The DataverseNO Docker Container platform stands on the shoulders of many other software projects. Before running it, the following must be installed and/or configured
-- Login to DatavarseNO:
+- Authentication integration options (Login to DataverseNO):
- [FEIDE SAML / OpenID](https://www.feide.no) - FEIDE is the Norwegian government solution for secure identification in the education sector. We use FEIDE SAML for identity management and single sign-on (SSO) in our Dataverse application.
- Azure OpenID - OpenID Connect is a security-token based extension of the OAuth 2.0 authorization protocol for single sign-on. Azure OpenID provides single sign-on and API access to the Dataverse application.
From e769eada7e9703add5a4a64410b7977c6b4298b6 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Mon, 27 Feb 2023 12:27:02 +0100
Subject: [PATCH 241/354] Create FAQ and Troubleshooting
---
doc/FAQ and Troubleshooting | 1 +
1 file changed, 1 insertion(+)
create mode 100644 doc/FAQ and Troubleshooting
diff --git a/doc/FAQ and Troubleshooting b/doc/FAQ and Troubleshooting
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/doc/FAQ and Troubleshooting
@@ -0,0 +1 @@
+
From e64dabf02b3855bbdb0e253f97137222ab50b6be Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Mon, 27 Feb 2023 12:27:52 +0100
Subject: [PATCH 242/354] Delete FAQ and Troubleshooting
---
doc/FAQ and Troubleshooting | 1 -
1 file changed, 1 deletion(-)
delete mode 100644 doc/FAQ and Troubleshooting
diff --git a/doc/FAQ and Troubleshooting b/doc/FAQ and Troubleshooting
deleted file mode 100644
index 8b13789..0000000
--- a/doc/FAQ and Troubleshooting
+++ /dev/null
@@ -1 +0,0 @@
-
From 236f68fb12c18d8d515d0dd1690904efecc4d1e6 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Mon, 27 Feb 2023 12:28:23 +0100
Subject: [PATCH 243/354] Create Troubleshooting
---
doc/Troubleshooting | 1 +
1 file changed, 1 insertion(+)
create mode 100644 doc/Troubleshooting
diff --git a/doc/Troubleshooting b/doc/Troubleshooting
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/doc/Troubleshooting
@@ -0,0 +1 @@
+
From 53d3cc93bf07a4ba2d56df4391a99256f69deec7 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 28 Feb 2023 09:10:50 +0000
Subject: [PATCH 244/354] backup to local
---
.../dataverse.no/init.d/cronjob/backupData.sh | 20 +++++++++++++++++++
.../init.d/cronjob/backupfiles.sql | 8 ++++++--
2 files changed, 26 insertions(+), 2 deletions(-)
create mode 100755 distros/dataverse.no/init.d/cronjob/backupData.sh
diff --git a/distros/dataverse.no/init.d/cronjob/backupData.sh b/distros/dataverse.no/init.d/cronjob/backupData.sh
new file mode 100755
index 0000000..7d1f5e1
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/backupData.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+export PGPASSWORD=`cat /secrets/db/password`
+cp -r /secrets/aws-cli/.aws ~
+
+
+#file=10.21337/OZ4JBV/1869225dfbd-4edecc03da9e
+
+files=`psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/backupfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2]}'`
+
+for file in $files
+ do
+ echo $file
+ aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/$file /data/$file
+ echo "doneit"
+
+ done
+#echo $files
+
+#cp -r /secrets/aws-cli/.aws ~
+#aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/$file /data/$file
diff --git a/distros/dataverse.no/init.d/cronjob/backupfiles.sql b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
index 760180a..469cde7 100644
--- a/distros/dataverse.no/init.d/cronjob/backupfiles.sql
+++ b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
@@ -1,5 +1,9 @@
-select storageidentifier from dvobject where modificationtime>='2022-12-05';
+--select storageidentifier from dvobject where modificationtime>='2022-12-05';
+select storageidentifier, CONCAT(authority, '/',split_part(identifier, '/', 1) , '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' and dtype='DataFile';
-#10.
+
+
+--select split_part(identifier, '/', 1) from dvobject where storageidentifier like '%S3:%' and identifier like '%/%' limit 10;
+--select storageidentifier, CONCAT(authority, '/', identifier, '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' limit 10;
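The rewritten query builds the S3 object key from the authority, the first segment of the identifier, and the storageidentifier with the bucket prefix stripped. A worked example of that mapping, reusing the sample path already commented out in backupData.sh; the endpoint and bucket variables are whatever the container environment provides:

    # Key produced by backupfiles.sql for the sample row:
    #   storageidentifier: S3://2002-green-dataversenotest1:1869225dfbd-4edecc03da9e
    #   authority: 10.21337, identifier: OZ4JBV/...
    #   -> 10.21337/OZ4JBV/1869225dfbd-4edecc03da9e
    file=10.21337/OZ4JBV/1869225dfbd-4edecc03da9e
    aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/$file /data/$file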
From ec8209fce603d06c3c6f42d6177b6f4949fe3bec Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 28 Feb 2023 10:24:56 +0100
Subject: [PATCH 245/354] revert to local storage
---
doc/maintenance.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/doc/maintenance.rst b/doc/maintenance.rst
index b15940d..0ed20ff 100644
--- a/doc/maintenance.rst
+++ b/doc/maintenance.rst
@@ -179,3 +179,10 @@ Delete action logs older then 90 days
DELETE FROM actionlogrecord WHERE starttime < current_timestamp - interval '90 days';
+
+.. code-block:: sql
+
+ dataverse=# UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'S3://2002-green-dataversenotest1:','file://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%S3://%');
+ dataverse=# UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'S3://','file://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%S3://%');
+
+
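The two statements above reverse the file://-to-S3 migration documented earlier in maintenance.rst. A hedged sketch of running them non-interactively, reusing the connection conventions from the cronjob scripts in this series; the password file under /secrets/db and the "postgres" host name are assumptions carried over from those scripts:

    # Hypothetical non-interactive run of the revert statements shown above:
    export PGPASSWORD=$(cat /secrets/db/password)
    psql -U dataverse dataverse -h postgres -c "UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'S3://2002-green-dataversenotest1:','file://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%S3://%');"
    psql -U dataverse dataverse -h postgres -c "UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'S3://','file://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%S3://%');"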
From e76fc85d5901f58909a996eb8a6a570a8f325499 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 28 Feb 2023 09:47:28 +0000
Subject: [PATCH 246/354] update to select only the files modified today
---
distros/dataverse.no/init.d/cronjob/backupData.sh | 9 ++++++---
distros/dataverse.no/init.d/cronjob/backupfiles.sql | 2 +-
2 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/backupData.sh b/distros/dataverse.no/init.d/cronjob/backupData.sh
index 7d1f5e1..0a035f3 100755
--- a/distros/dataverse.no/init.d/cronjob/backupData.sh
+++ b/distros/dataverse.no/init.d/cronjob/backupData.sh
@@ -9,9 +9,12 @@ files=`psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjo
for file in $files
do
- echo $file
- aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/$file /data/$file
- echo "doneit"
+ if [ -f /data/$file ]; then
+ echo "$file already copied"
+ else
+ aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/$file /data/$file
+ echo "doneit"
+ fi
done
#echo $files
diff --git a/distros/dataverse.no/init.d/cronjob/backupfiles.sql b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
index 469cde7..1c2e6e5 100644
--- a/distros/dataverse.no/init.d/cronjob/backupfiles.sql
+++ b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
@@ -1,7 +1,7 @@
--select storageidentifier from dvobject where modificationtime>='2022-12-05';
-select storageidentifier, CONCAT(authority, '/',split_part(identifier, '/', 1) , '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' and dtype='DataFile';
+select storageidentifier, CONCAT(authority, '/',split_part(identifier, '/', 1) , '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=CURRENT_DATE;
From 390f8fa9b3757774efec22a12764b8d2519ca88a Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 28 Feb 2023 09:54:06 +0000
Subject: [PATCH 247/354] updated to get the files from the last two days
---
distros/dataverse.no/init.d/cronjob/backupfiles.sql | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/backupfiles.sql b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
index 1c2e6e5..1fd5b04 100644
--- a/distros/dataverse.no/init.d/cronjob/backupfiles.sql
+++ b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
@@ -1,7 +1,7 @@
--select storageidentifier from dvobject where modificationtime>='2022-12-05';
-select storageidentifier, CONCAT(authority, '/',split_part(identifier, '/', 1) , '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=CURRENT_DATE;
+select storageidentifier, CONCAT(authority, '/',split_part(identifier, '/', 1) , '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '1 day');
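With the selection window widened to files modified since yesterday, backupData.sh lends itself to a daily schedule. A hedged crontab sketch run from the docker host; the container name and log path are assumptions, not part of this patch:

    # Hypothetical host crontab entry: copy the previous day's files every night at 03:00
    0 3 * * * docker exec dataverse bash -c 'bash ${INIT_SCRIPTS_FOLDER}/cronjob/backupData.sh' >> /var/log/backupData.log 2>&1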
From eedd5fe28e9f26e1c9f2852c8eb82a21d08b3228 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 28 Feb 2023 12:46:06 +0100
Subject: [PATCH 248/354] fix for geodata
---
.../_dvno_geolocation_cleaning_v5_13.sql | 3259 +++++++++++++++++
1 file changed, 3259 insertions(+)
create mode 100644 distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
new file mode 100644
index 0000000..cb9acd5
--- /dev/null
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
@@ -0,0 +1,3259 @@
+update datasetfieldvalue set value='71.2' where id=6831;
+update datasetfieldvalue set value='31.2' where id=6832;
+update datasetfieldvalue set value='4.5' where id=6833;
+update datasetfieldvalue set value='57.9' where id=6835;
+update datasetfieldvalue set value='31.2' where id=6878;
+update datasetfieldvalue set value='57.9' where id=6888;
+update datasetfieldvalue set value='71.2' where id=6889;
+update datasetfieldvalue set value='4.5' where id=6892;
+update datasetfieldvalue set value='71.1' where id=15257;
+update datasetfieldvalue set value='60.8' where id=15258;
+update datasetfieldvalue set value='31.1' where id=15259;
+update datasetfieldvalue set value='58.0' where id=15260;
+update datasetfieldvalue set value='57.76' where id=17053;
+update datasetfieldvalue set value='71.38' where id=17059;
+update datasetfieldvalue set value='4.09' where id=17064;
+update datasetfieldvalue set value='31.76' where id=17073;
+update datasetfieldvalue set value='19.216667' where id=17509;
+update datasetfieldvalue set value='19.216667' where id=17514;
+update datasetfieldvalue set value='69.583333' where id=17531;
+update datasetfieldvalue set value='69.583333' where id=17543;
+update datasetfieldvalue set value='19.216667' where id=17689;
+update datasetfieldvalue set value='19.216667' where id=17690;
+update datasetfieldvalue set value='69.583333' where id=17703;
+update datasetfieldvalue set value='69.583333' where id=17705;
+update datasetfieldvalue set value='31.1' where id=17841;
+update datasetfieldvalue set value='58.0 ' where id=17846;
+update datasetfieldvalue set value='71.1' where id=17848;
+update datasetfieldvalue set value='60.8 ' where id=17852;
+update datasetfieldvalue set value='71.1' where id=17871;
+update datasetfieldvalue set value='60.8 ' where id=17874;
+update datasetfieldvalue set value='58.0 ' where id=17889;
+update datasetfieldvalue set value='31.1' where id=17891;
+update datasetfieldvalue set value='69.583333' where id=17930;
+update datasetfieldvalue set value='19.216667' where id=17939;
+update datasetfieldvalue set value='19.216667' where id=17944;
+update datasetfieldvalue set value='69.583333' where id=17949;
+update datasetfieldvalue set value='69.583333' where id=17980;
+update datasetfieldvalue set value='19.216667' where id=17991;
+update datasetfieldvalue set value='19.216667' where id=18000;
+update datasetfieldvalue set value='69.583333' where id=18008;
+update datasetfieldvalue set value='69.583333' where id=18015;
+update datasetfieldvalue set value='19.216667' where id=18025;
+update datasetfieldvalue set value='19.216667' where id=18038;
+update datasetfieldvalue set value='69.583333' where id=18055;
+update datasetfieldvalue set value='69.583333' where id=18058;
+update datasetfieldvalue set value='19.216667' where id=18076;
+update datasetfieldvalue set value='19.216667' where id=18092;
+update datasetfieldvalue set value='69.583333' where id=18097;
+update datasetfieldvalue set value='69.583333' where id=18103;
+update datasetfieldvalue set value='19.216667' where id=18104;
+update datasetfieldvalue set value='69.583333' where id=18130;
+update datasetfieldvalue set value='19.216667' where id=18134;
+update datasetfieldvalue set value='69.583333' where id=18141;
+update datasetfieldvalue set value='19.216667' where id=18152;
+update datasetfieldvalue set value='19.216667' where id=18179;
+update datasetfieldvalue set value='69.583333' where id=18181;
+update datasetfieldvalue set value='69.583333' where id=18189;
+update datasetfieldvalue set value='19.216667' where id=18197;
+update datasetfieldvalue set value='69.583333' where id=18205;
+update datasetfieldvalue set value='19.216667' where id=18206;
+update datasetfieldvalue set value='19.216667' where id=18227;
+update datasetfieldvalue set value='19.216667' where id=18233;
+update datasetfieldvalue set value='69.583333' where id=18235;
+update datasetfieldvalue set value='69.583333' where id=18247;
+update datasetfieldvalue set value='69.583333' where id=18274;
+update datasetfieldvalue set value='19.216667' where id=18275;
+update datasetfieldvalue set value='69.583333' where id=18292;
+update datasetfieldvalue set value='19.216667' where id=18304;
+update datasetfieldvalue set value='69.583333' where id=18320;
+update datasetfieldvalue set value='19.216667' where id=18324;
+update datasetfieldvalue set value='69.583333' where id=18328;
+update datasetfieldvalue set value='19.216667' where id=18345;
+update datasetfieldvalue set value='19.216667' where id=18353;
+update datasetfieldvalue set value='69.583333' where id=18360;
+update datasetfieldvalue set value='19.216667' where id=18362;
+update datasetfieldvalue set value='69.583333' where id=18365;
+update datasetfieldvalue set value='69.583333' where id=18395;
+update datasetfieldvalue set value='19.216667' where id=18405;
+update datasetfieldvalue set value='69.583333' where id=18409;
+update datasetfieldvalue set value='19.216667' where id=18424;
+update datasetfieldvalue set value='69.583333' where id=18435;
+update datasetfieldvalue set value='69.583333' where id=18462;
+update datasetfieldvalue set value='19.216667' where id=18465;
+update datasetfieldvalue set value='19.216667' where id=18470;
+update datasetfieldvalue set value='19.216667' where id=18488;
+update datasetfieldvalue set value='69.583333' where id=18502;
+update datasetfieldvalue set value='69.583333' where id=18503;
+update datasetfieldvalue set value='19.216667' where id=18508;
+update datasetfieldvalue set value='69.583333' where id=18524;
+update datasetfieldvalue set value='19.216667' where id=18530;
+update datasetfieldvalue set value='69.583333' where id=18553;
+update datasetfieldvalue set value='19.216667' where id=18559;
+update datasetfieldvalue set value='19.216667' where id=18570;
+update datasetfieldvalue set value='69.583333' where id=18581;
+update datasetfieldvalue set value='19.216667' where id=18589;
+update datasetfieldvalue set value='69.583333' where id=18597;
+update datasetfieldvalue set value='19.216667' where id=18605;
+update datasetfieldvalue set value='69.583333' where id=18609;
+update datasetfieldvalue set value='69.583333' where id=18617;
+update datasetfieldvalue set value='19.216667' where id=18619;
+update datasetfieldvalue set value='19.216667' where id=18656;
+update datasetfieldvalue set value='69.583333' where id=18663;
+update datasetfieldvalue set value='69.583333' where id=18670;
+update datasetfieldvalue set value='19.216667' where id=18684;
+update datasetfieldvalue set value='19.216667' where id=18689;
+update datasetfieldvalue set value='19.216667' where id=18698;
+update datasetfieldvalue set value='69.583333' where id=18700;
+update datasetfieldvalue set value='69.583333' where id=18726;
+update datasetfieldvalue set value='19.216667' where id=18733;
+update datasetfieldvalue set value='19.216667' where id=18749;
+update datasetfieldvalue set value='69.583333' where id=18757;
+update datasetfieldvalue set value='69.583333' where id=18760;
+update datasetfieldvalue set value='19.216667' where id=18774;
+update datasetfieldvalue set value='69.583333' where id=18776;
+update datasetfieldvalue set value='69.583333' where id=18805;
+update datasetfieldvalue set value='19.216667' where id=18806;
+update datasetfieldvalue set value='69.583333' where id=18822;
+update datasetfieldvalue set value='19.216667' where id=18848;
+update datasetfieldvalue set value='19.216667' where id=18849;
+update datasetfieldvalue set value='69.583333' where id=18850;
+update datasetfieldvalue set value='19.216667' where id=18856;
+update datasetfieldvalue set value='69.583333' where id=18857;
+update datasetfieldvalue set value='69.583333' where id=18880;
+update datasetfieldvalue set value='19.216667' where id=18889;
+update datasetfieldvalue set value='69.583333' where id=18909;
+update datasetfieldvalue set value='19.216667' where id=18911;
+update datasetfieldvalue set value='69.583333' where id=18920;
+update datasetfieldvalue set value='19.216667' where id=18937;
+update datasetfieldvalue set value='69.583333' where id=18956;
+update datasetfieldvalue set value='19.216667' where id=18963;
+update datasetfieldvalue set value='19.216667' where id=18969;
+update datasetfieldvalue set value='69.583333' where id=18979;
+update datasetfieldvalue set value='69.583333' where id=18982;
+update datasetfieldvalue set value='19.216667' where id=18996;
+update datasetfieldvalue set value='69.583333' where id=19014;
+update datasetfieldvalue set value='19.216667' where id=19017;
+update datasetfieldvalue set value='19.216667' where id=19026;
+update datasetfieldvalue set value='69.583333' where id=19044;
+update datasetfieldvalue set value='69.583333' where id=19047;
+update datasetfieldvalue set value='19.216667' where id=19052;
+update datasetfieldvalue set value='19.216667' where id=19072;
+update datasetfieldvalue set value='69.583333' where id=19083;
+update datasetfieldvalue set value='69.583333' where id=19090;
+update datasetfieldvalue set value='19.216667' where id=19098;
+update datasetfieldvalue set value='69.583333' where id=19119;
+update datasetfieldvalue set value='69.583333' where id=19141;
+update datasetfieldvalue set value='19.216667' where id=19142;
+update datasetfieldvalue set value='19.216667' where id=19146;
+update datasetfieldvalue set value='19.216667' where id=19157;
+update datasetfieldvalue set value='69.583333' where id=19172;
+update datasetfieldvalue set value='69.583333' where id=19178;
+update datasetfieldvalue set value='19.216667' where id=19180;
+update datasetfieldvalue set value='69.583333' where id=19194;
+update datasetfieldvalue set value='19.216667' where id=19196;
+update datasetfieldvalue set value='69.583333' where id=19197;
+update datasetfieldvalue set value='19.216667' where id=19204;
+update datasetfieldvalue set value='19.216667' where id=19238;
+update datasetfieldvalue set value='69.583333' where id=19246;
+update datasetfieldvalue set value='19.216667' where id=19251;
+update datasetfieldvalue set value='69.583333' where id=19271;
+update datasetfieldvalue set value='19.216667' where id=19283;
+update datasetfieldvalue set value='69.583333' where id=19289;
+update datasetfieldvalue set value='19.216667' where id=19294;
+update datasetfieldvalue set value='69.583333' where id=19303;
+update datasetfieldvalue set value='69.583333' where id=19319;
+update datasetfieldvalue set value='69.583333' where id=19348;
+update datasetfieldvalue set value='19.216667' where id=19350;
+update datasetfieldvalue set value='19.216667' where id=19354;
+update datasetfieldvalue set value='19.216667' where id=19362;
+update datasetfieldvalue set value='69.583333' where id=19368;
+update datasetfieldvalue set value='69.583333' where id=19370;
+update datasetfieldvalue set value='19.216667' where id=19393;
+update datasetfieldvalue set value='69.583333' where id=19409;
+update datasetfieldvalue set value='69.583333' where id=19413;
+update datasetfieldvalue set value='19.216667' where id=19417;
+update datasetfieldvalue set value='19.216667' where id=19442;
+update datasetfieldvalue set value='19.216667' where id=19449;
+update datasetfieldvalue set value='69.583333' where id=19452;
+update datasetfieldvalue set value='69.583333' where id=19465;
+update datasetfieldvalue set value='19.216667' where id=19478;
+update datasetfieldvalue set value='19.216667' where id=19486;
+update datasetfieldvalue set value='69.583333' where id=19498;
+update datasetfieldvalue set value='19.216667' where id=19506;
+update datasetfieldvalue set value='69.583333' where id=19508;
+update datasetfieldvalue set value='69.583333' where id=19532;
+update datasetfieldvalue set value='69.583333' where id=19538;
+update datasetfieldvalue set value='19.216667' where id=19541;
+update datasetfieldvalue set value='19.216667' where id=19561;
+update datasetfieldvalue set value='19.216667' where id=19570;
+update datasetfieldvalue set value='69.583333' where id=19594;
+update datasetfieldvalue set value='69.583333' where id=19603;
+update datasetfieldvalue set value='19.216667' where id=19611;
+update datasetfieldvalue set value='69.583333' where id=19613;
+update datasetfieldvalue set value='19.216667' where id=19616;
+update datasetfieldvalue set value='69.583333' where id=19630;
+update datasetfieldvalue set value='19.216667' where id=19653;
+update datasetfieldvalue set value='69.583333' where id=19659;
+update datasetfieldvalue set value='19.216667' where id=19674;
+update datasetfieldvalue set value='19.216667' where id=19677;
+update datasetfieldvalue set value='69.583333' where id=19684;
+update datasetfieldvalue set value='69.583333' where id=19714;
+update datasetfieldvalue set value='19.216667' where id=19733;
+update datasetfieldvalue set value='19.216667' where id=19735;
+update datasetfieldvalue set value='69.583333' where id=19736;
+update datasetfieldvalue set value='69.583333' where id=19744;
+update datasetfieldvalue set value='69.583333' where id=19768;
+update datasetfieldvalue set value='19.216667' where id=19770;
+update datasetfieldvalue set value='19.216667' where id=19777;
+update datasetfieldvalue set value='69.583333' where id=19780;
+update datasetfieldvalue set value='19.216667' where id=19786;
+update datasetfieldvalue set value='69.583333' where id=19800;
+update datasetfieldvalue set value='19.216667' where id=19809;
+update datasetfieldvalue set value='69.583333' where id=19829;
+update datasetfieldvalue set value='19.216667' where id=19830;
+update datasetfieldvalue set value='19.216667' where id=19837;
+update datasetfieldvalue set value='69.583333' where id=19863;
+update datasetfieldvalue set value='69.583333' where id=19896;
+update datasetfieldvalue set value='69.583333' where id=19909;
+update datasetfieldvalue set value='19.216667' where id=19912;
+update datasetfieldvalue set value='19.216667' where id=19927;
+update datasetfieldvalue set value='19.216667' where id=19939;
+update datasetfieldvalue set value='69.583333' where id=19940;
+update datasetfieldvalue set value='19.216667' where id=19944;
+update datasetfieldvalue set value='69.583333' where id=19947;
+update datasetfieldvalue set value='19.216667' where id=19977;
+update datasetfieldvalue set value='69.583333' where id=19982;
+update datasetfieldvalue set value='19.216667' where id=20008;
+update datasetfieldvalue set value='69.583333' where id=20010;
+update datasetfieldvalue set value='69.583333' where id=20018;
+update datasetfieldvalue set value='19.216667' where id=20021;
+update datasetfieldvalue set value='69.583333' where id=20023;
+update datasetfieldvalue set value='19.216667' where id=20034;
+update datasetfieldvalue set value='69.583333' where id=20071;
+update datasetfieldvalue set value='69.583333' where id=20080;
+update datasetfieldvalue set value='19.216667' where id=20083;
+update datasetfieldvalue set value='19.216667' where id=20097;
+update datasetfieldvalue set value='69.583333' where id=20102;
+update datasetfieldvalue set value='19.216667' where id=20122;
+update datasetfieldvalue set value='19.216667' where id=20123;
+update datasetfieldvalue set value='69.583333' where id=20135;
+update datasetfieldvalue set value='69.583333' where id=20152;
+update datasetfieldvalue set value='19.216667' where id=20165;
+update datasetfieldvalue set value='69.583333' where id=20166;
+update datasetfieldvalue set value='19.216667' where id=20183;
+update datasetfieldvalue set value='69.583333' where id=20205;
+update datasetfieldvalue set value='19.216667' where id=20208;
+update datasetfieldvalue set value='19.216667' where id=20209;
+update datasetfieldvalue set value='69.583333' where id=20226;
+update datasetfieldvalue set value='19.216667' where id=20242;
+update datasetfieldvalue set value='69.583333' where id=20243;
+update datasetfieldvalue set value='69.583333' where id=20259;
+update datasetfieldvalue set value='19.216667' where id=20269;
+update datasetfieldvalue set value='19.216667' where id=20276;
+update datasetfieldvalue set value='19.216667' where id=20299;
+update datasetfieldvalue set value='69.583333' where id=20308;
+update datasetfieldvalue set value='69.583333' where id=20310;
+update datasetfieldvalue set value='69.583333' where id=20315;
+update datasetfieldvalue set value='19.216667' where id=20316;
+update datasetfieldvalue set value='69.583333' where id=20330;
+update datasetfieldvalue set value='19.216667' where id=20344;
+update datasetfieldvalue set value='69.583333' where id=20358;
+update datasetfieldvalue set value='19.216667' where id=20361;
+update datasetfieldvalue set value='69.583333' where id=20379;
+update datasetfieldvalue set value='19.216667' where id=20394;
+update datasetfieldvalue set value='19.216667' where id=20399;
+update datasetfieldvalue set value='69.583333' where id=20406;
+update datasetfieldvalue set value='69.583333' where id=20416;
+update datasetfieldvalue set value='19.216667' where id=20417;
+update datasetfieldvalue set value='19.216667' where id=20443;
+update datasetfieldvalue set value='69.583333' where id=20454;
+update datasetfieldvalue set value='69.583333' where id=20458;
+update datasetfieldvalue set value='19.216667' where id=20471;
+update datasetfieldvalue set value='69.583333' where id=20488;
+update datasetfieldvalue set value='19.216667' where id=20492;
+update datasetfieldvalue set value='19.216667' where id=20503;
+update datasetfieldvalue set value='69.583333' where id=20511;
+update datasetfieldvalue set value='69.583333' where id=20523;
+update datasetfieldvalue set value='19.216667' where id=20528;
+update datasetfieldvalue set value='69.583333' where id=20552;
+update datasetfieldvalue set value='19.216667' where id=20559;
+update datasetfieldvalue set value='69.583333' where id=20567;
+update datasetfieldvalue set value='69.583333' where id=20569;
+update datasetfieldvalue set value='19.216667' where id=20570;
+update datasetfieldvalue set value='19.216667' where id=20575;
+update datasetfieldvalue set value='69.583333' where id=20608;
+update datasetfieldvalue set value='19.216667' where id=20617;
+update datasetfieldvalue set value='69.583333' where id=20637;
+update datasetfieldvalue set value='19.216667' where id=20639;
+update datasetfieldvalue set value='69.583333' where id=20650;
+update datasetfieldvalue set value='19.216667' where id=20651;
+update datasetfieldvalue set value='19.216667' where id=20669;
+update datasetfieldvalue set value='69.583333' where id=20673;
+update datasetfieldvalue set value='19.216667' where id=20694;
+update datasetfieldvalue set value='69.583333' where id=20700;
+update datasetfieldvalue set value='19.216667' where id=20718;
+update datasetfieldvalue set value='69.583333' where id=20728;
+update datasetfieldvalue set value='19.216667' where id=20737;
+update datasetfieldvalue set value='19.216667' where id=20749;
+update datasetfieldvalue set value='69.583333' where id=20768;
+update datasetfieldvalue set value='69.583333' where id=20769;
+update datasetfieldvalue set value='19.216667' where id=20784;
+update datasetfieldvalue set value='69.583333' where id=20799;
+update datasetfieldvalue set value='19.216667' where id=20800;
+update datasetfieldvalue set value='69.583333' where id=20803;
+update datasetfieldvalue set value='69.583333' where id=20816;
+update datasetfieldvalue set value='19.216667' where id=20844;
+update datasetfieldvalue set value='69.583333' where id=20848;
+update datasetfieldvalue set value='19.216667' where id=20853;
+update datasetfieldvalue set value='19.216667' where id=20859;
+update datasetfieldvalue set value='69.583333' where id=20886;
+update datasetfieldvalue set value='69.583333' where id=20887;
+update datasetfieldvalue set value='19.216667' where id=20888;
+update datasetfieldvalue set value='19.216667' where id=20912;
+update datasetfieldvalue set value='19.216667' where id=20913;
+update datasetfieldvalue set value='69.583333' where id=20926;
+update datasetfieldvalue set value='69.583333' where id=20933;
+update datasetfieldvalue set value='19.216667' where id=20944;
+update datasetfieldvalue set value='19.216667' where id=20955;
+update datasetfieldvalue set value='69.583333' where id=20958;
+update datasetfieldvalue set value='69.583333' where id=20975;
+update datasetfieldvalue set value='19.216667' where id=20991;
+update datasetfieldvalue set value='69.583333' where id=20993;
+update datasetfieldvalue set value='69.583333' where id=21005;
+update datasetfieldvalue set value='19.216667' where id=21022;
+update datasetfieldvalue set value='69.583333' where id=21042;
+update datasetfieldvalue set value='19.216667' where id=21046;
+update datasetfieldvalue set value='69.583333' where id=21055;
+update datasetfieldvalue set value='19.216667' where id=21056;
+update datasetfieldvalue set value='69.583333' where id=21068;
+update datasetfieldvalue set value='69.583333' where id=21069;
+update datasetfieldvalue set value='19.216667' where id=21099;
+update datasetfieldvalue set value='19.216667' where id=21102;
+update datasetfieldvalue set value='69.583333' where id=21110;
+update datasetfieldvalue set value='19.216667' where id=21124;
+update datasetfieldvalue set value='69.583333' where id=21131;
+update datasetfieldvalue set value='19.216667' where id=21150;
+update datasetfieldvalue set value='69.583333' where id=21154;
+update datasetfieldvalue set value='19.216667' where id=21179;
+update datasetfieldvalue set value='69.583333' where id=21188;
+update datasetfieldvalue set value='19.216667' where id=21193;
+update datasetfieldvalue set value='69.583333' where id=21196;
+update datasetfieldvalue set value='19.216667' where id=21198;
+update datasetfieldvalue set value='69.583333' where id=21218;
+update datasetfieldvalue set value='19.216667' where id=21232;
+update datasetfieldvalue set value='69.583333' where id=21251;
+update datasetfieldvalue set value='19.216667' where id=21261;
+update datasetfieldvalue set value='69.583333' where id=21274;
+update datasetfieldvalue set value='19.216667' where id=21277;
+update datasetfieldvalue set value='69.583333' where id=21284;
+update datasetfieldvalue set value='69.583333' where id=21301;
+update datasetfieldvalue set value='19.216667' where id=21308;
+update datasetfieldvalue set value='19.216667' where id=21318;
+update datasetfieldvalue set value='19.216667' where id=21332;
+update datasetfieldvalue set value='19.216667' where id=21354;
+update datasetfieldvalue set value='69.583333' where id=21357;
+update datasetfieldvalue set value='69.583333' where id=21361;
+update datasetfieldvalue set value='69.583333' where id=21390;
+update datasetfieldvalue set value='69.583333' where id=21395;
+update datasetfieldvalue set value='19.216667' where id=21399;
+update datasetfieldvalue set value='19.216667' where id=21402;
+update datasetfieldvalue set value='19.216667' where id=21424;
+update datasetfieldvalue set value='69.583333' where id=21434;
+update datasetfieldvalue set value='69.583333' where id=21444;
+update datasetfieldvalue set value='19.216667' where id=21445;
+update datasetfieldvalue set value='19.216667' where id=21446;
+update datasetfieldvalue set value='69.583333' where id=21453;
+update datasetfieldvalue set value='69.583333' where id=21467;
+update datasetfieldvalue set value='19.216667' where id=21473;
+update datasetfieldvalue set value='19.216667' where id=21491;
+update datasetfieldvalue set value='69.583333' where id=21499;
+update datasetfieldvalue set value='69.583333' where id=21513;
+update datasetfieldvalue set value='19.216667' where id=21514;
+update datasetfieldvalue set value='69.583333' where id=21546;
+update datasetfieldvalue set value='19.216667' where id=21550;
+update datasetfieldvalue set value='19.216667' where id=21557;
+update datasetfieldvalue set value='69.583333' where id=21567;
+update datasetfieldvalue set value='19.216667' where id=21590;
+update datasetfieldvalue set value='69.583333' where id=21606;
+update datasetfieldvalue set value='19.216667' where id=21610;
+update datasetfieldvalue set value='69.583333' where id=21611;
+update datasetfieldvalue set value='69.583333' where id=21954;
+update datasetfieldvalue set value='69.583333' where id=21962;
+update datasetfieldvalue set value='19.216667' where id=21969;
+update datasetfieldvalue set value='19.216667' where id=21971;
+update datasetfieldvalue set value='31.76' where id=22305;
+update datasetfieldvalue set value='57.76' where id=22308;
+update datasetfieldvalue set value='4.09' where id=22333;
+update datasetfieldvalue set value='71.38' where id=22343;
+update datasetfieldvalue set value='19.216667' where id=22364;
+update datasetfieldvalue set value='69.583333' where id=22368;
+update datasetfieldvalue set value='69.583333' where id=22378;
+update datasetfieldvalue set value='19.216667' where id=22385;
+update datasetfieldvalue set value='19.216667' where id=22465;
+update datasetfieldvalue set value='69.583333' where id=22469;
+update datasetfieldvalue set value='69.583333' where id=22487;
+update datasetfieldvalue set value='19.216667' where id=22489;
+update datasetfieldvalue set value='9.333333' where id=22494;
+update datasetfieldvalue set value='78.666667' where id=22495;
+update datasetfieldvalue set value='9.666667' where id=22496;
+update datasetfieldvalue set value='78.5' where id=22497;
+update datasetfieldvalue set value='16.148889' where id=22621;
+update datasetfieldvalue set value='76.147778' where id=22622;
+update datasetfieldvalue set value='76.084167' where id=22623;
+update datasetfieldvalue set value='15.824444' where id=22625;
+update datasetfieldvalue set value='69.583333' where id=22930;
+update datasetfieldvalue set value='69.583333' where id=22947;
+update datasetfieldvalue set value='19.216667' where id=22950;
+update datasetfieldvalue set value='19.216667' where id=22956;
+update datasetfieldvalue set value='19.216667' where id=22967;
+update datasetfieldvalue set value='19.216667' where id=22969;
+update datasetfieldvalue set value='69.583333' where id=22979;
+update datasetfieldvalue set value='69.583333' where id=23007;
+update datasetfieldvalue set value='19.216667' where id=23021;
+update datasetfieldvalue set value='69.583333' where id=23027;
+update datasetfieldvalue set value='69.583333' where id=23031;
+update datasetfieldvalue set value='19.216667' where id=23049;
+update datasetfieldvalue set value='19.216667' where id=23057;
+update datasetfieldvalue set value='19.216667' where id=23060;
+update datasetfieldvalue set value='69.583333' where id=23078;
+update datasetfieldvalue set value='69.583333' where id=23088;
+update datasetfieldvalue set value='69.583333' where id=23488;
+update datasetfieldvalue set value='19.216667' where id=23498;
+update datasetfieldvalue set value='19.216667' where id=23511;
+update datasetfieldvalue set value='69.583333' where id=23513;
+update datasetfieldvalue set value='19.216667' where id=23579;
+update datasetfieldvalue set value='19.216667' where id=23592;
+update datasetfieldvalue set value='69.583333' where id=23593;
+update datasetfieldvalue set value='69.583333' where id=23597;
+update datasetfieldvalue set value='69.583333' where id=23603;
+update datasetfieldvalue set value='19.216667' where id=23634;
+update datasetfieldvalue set value='69.583333' where id=23635;
+update datasetfieldvalue set value='19.216667' where id=23639;
+update datasetfieldvalue set value='19.216667' where id=23652;
+update datasetfieldvalue set value='69.583333' where id=23670;
+update datasetfieldvalue set value='19.216667' where id=23675;
+update datasetfieldvalue set value='69.583333' where id=23676;
+update datasetfieldvalue set value='19.216667' where id=23695;
+update datasetfieldvalue set value='19.216667' where id=23714;
+update datasetfieldvalue set value='69.583333' where id=23716;
+update datasetfieldvalue set value='69.583333' where id=23724;
+update datasetfieldvalue set value='19.216667' where id=23732;
+update datasetfieldvalue set value='69.583333' where id=23747;
+update datasetfieldvalue set value='19.216667' where id=23751;
+update datasetfieldvalue set value='69.583333' where id=23763;
+update datasetfieldvalue set value='69.583333' where id=23768;
+update datasetfieldvalue set value='69.583333' where id=23786;
+update datasetfieldvalue set value='19.216667' where id=23787;
+update datasetfieldvalue set value='19.216667' where id=23795;
+update datasetfieldvalue set value='78.666667' where id=23900;
+update datasetfieldvalue set value='9.666667' where id=23936;
+update datasetfieldvalue set value='78.5' where id=23940;
+update datasetfieldvalue set value='9.333333' where id=23944;
+update datasetfieldvalue set value='19.216667' where id=23958;
+update datasetfieldvalue set value='69.583333' where id=23963;
+update datasetfieldvalue set value='19.216667' where id=23965;
+update datasetfieldvalue set value='69.583333' where id=23972;
+update datasetfieldvalue set value='19.216667' where id=24009;
+update datasetfieldvalue set value='69.583333' where id=24015;
+update datasetfieldvalue set value='69.583333' where id=24016;
+update datasetfieldvalue set value='19.216667' where id=24035;
+update datasetfieldvalue set value='69.583333' where id=24037;
+update datasetfieldvalue set value='19.216667' where id=24048;
+update datasetfieldvalue set value='69.583333' where id=24064;
+update datasetfieldvalue set value='19.216667' where id=24065;
+update datasetfieldvalue set value='69.583333' where id=24090;
+update datasetfieldvalue set value='19.216667' where id=24092;
+update datasetfieldvalue set value='69.583333' where id=24098;
+update datasetfieldvalue set value='19.216667' where id=24105;
+update datasetfieldvalue set value='69.583333' where id=24123;
+update datasetfieldvalue set value='69.583333' where id=24127;
+update datasetfieldvalue set value='19.216667' where id=24129;
+update datasetfieldvalue set value='19.216667' where id=24144;
+update datasetfieldvalue set value='19.216667' where id=24168;
+update datasetfieldvalue set value='69.583333' where id=24177;
+update datasetfieldvalue set value='69.583333' where id=24198;
+update datasetfieldvalue set value='19.216667' where id=24202;
+update datasetfieldvalue set value='19.216667' where id=24215;
+update datasetfieldvalue set value='19.216667' where id=24226;
+update datasetfieldvalue set value='69.583333' where id=24241;
+update datasetfieldvalue set value='69.583333' where id=24243;
+update datasetfieldvalue set value='69.583333' where id=24252;
+update datasetfieldvalue set value='19.216667' where id=24253;
+update datasetfieldvalue set value='19.216667' where id=24265;
+update datasetfieldvalue set value='69.583333' where id=24283;
+update datasetfieldvalue set value='69.583333' where id=24291;
+update datasetfieldvalue set value='19.216667' where id=24294;
+update datasetfieldvalue set value='69.583333' where id=24303;
+update datasetfieldvalue set value='19.216667' where id=24329;
+update datasetfieldvalue set value='69.583333' where id=24331;
+update datasetfieldvalue set value='19.216667' where id=24351;
+update datasetfieldvalue set value='69.583333' where id=24352;
+update datasetfieldvalue set value='19.216667' where id=24360;
+update datasetfieldvalue set value='19.216667' where id=24377;
+update datasetfieldvalue set value='69.583333' where id=24393;
+update datasetfieldvalue set value='69.583333' where id=24404;
+update datasetfieldvalue set value='19.216667' where id=24406;
+update datasetfieldvalue set value='69.583333' where id=24415;
+update datasetfieldvalue set value='69.583333' where id=24426;
+update datasetfieldvalue set value='19.216667' where id=24440;
+update datasetfieldvalue set value='19.216667' where id=24455;
+update datasetfieldvalue set value='69.583333' where id=24457;
+update datasetfieldvalue set value='19.216667' where id=24458;
+update datasetfieldvalue set value='19.216667' where id=24474;
+update datasetfieldvalue set value='69.583333' where id=24481;
+update datasetfieldvalue set value='19.216667' where id=24523;
+update datasetfieldvalue set value='69.583333' where id=24535;
+update datasetfieldvalue set value='69.583333' where id=24538;
+update datasetfieldvalue set value='19.216667' where id=24539;
+update datasetfieldvalue set value='19.216667' where id=24552;
+update datasetfieldvalue set value='69.583333' where id=24559;
+update datasetfieldvalue set value='69.583333' where id=24572;
+update datasetfieldvalue set value='19.216667' where id=24580;
+update datasetfieldvalue set value='69.583333' where id=24589;
+update datasetfieldvalue set value='19.216667' where id=24592;
+update datasetfieldvalue set value='69.583333' where id=24600;
+update datasetfieldvalue set value='19.216667' where id=24610;
+update datasetfieldvalue set value='69.583333' where id=24646;
+update datasetfieldvalue set value='69.583333' where id=24652;
+update datasetfieldvalue set value='19.216667' where id=24656;
+update datasetfieldvalue set value='19.216667' where id=24657;
+update datasetfieldvalue set value='19.216667' where id=24679;
+update datasetfieldvalue set value='69.583333' where id=24689;
+update datasetfieldvalue set value='69.583333' where id=24695;
+update datasetfieldvalue set value='19.216667' where id=24704;
+update datasetfieldvalue set value='19.216667' where id=24715;
+update datasetfieldvalue set value='19.216667' where id=24725;
+update datasetfieldvalue set value='69.583333' where id=24742;
+update datasetfieldvalue set value='69.583333' where id=24747;
+update datasetfieldvalue set value='19.216667' where id=24750;
+update datasetfieldvalue set value='69.583333' where id=24768;
+update datasetfieldvalue set value='69.583333' where id=24783;
+update datasetfieldvalue set value='19.216667' where id=24788;
+update datasetfieldvalue set value='19.216667' where id=24794;
+update datasetfieldvalue set value='69.583333' where id=24800;
+update datasetfieldvalue set value='19.216667' where id=24806;
+update datasetfieldvalue set value='69.583333' where id=24828;
+update datasetfieldvalue set value='19.216667' where id=24836;
+update datasetfieldvalue set value='69.583333' where id=24849;
+update datasetfieldvalue set value='19.216667' where id=24854;
+update datasetfieldvalue set value='69.583333' where id=24871;
+update datasetfieldvalue set value='69.583333' where id=24881;
+update datasetfieldvalue set value='19.216667' where id=24884;
+update datasetfieldvalue set value='19.216667' where id=24905;
+update datasetfieldvalue set value='69.583333' where id=24908;
+update datasetfieldvalue set value='19.216667' where id=24918;
+update datasetfieldvalue set value='69.583333' where id=24919;
+update datasetfieldvalue set value='69.583333' where id=24924;
+update datasetfieldvalue set value='19.216667' where id=24927;
+update datasetfieldvalue set value='69.583333' where id=24975;
+update datasetfieldvalue set value='19.216667' where id=24988;
+update datasetfieldvalue set value='19.216667' where id=24993;
+update datasetfieldvalue set value='69.583333' where id=24996;
+update datasetfieldvalue set value='69.583333' where id=25006;
+update datasetfieldvalue set value='19.216667' where id=25019;
+update datasetfieldvalue set value='69.583333' where id=25020;
+update datasetfieldvalue set value='19.216667' where id=25021;
+update datasetfieldvalue set value='69.583333' where id=25049;
+update datasetfieldvalue set value='69.583333' where id=25054;
+update datasetfieldvalue set value='19.216667' where id=25062;
+update datasetfieldvalue set value='19.216667' where id=25071;
+update datasetfieldvalue set value='19.216667' where id=25088;
+update datasetfieldvalue set value='69.583333' where id=25110;
+update datasetfieldvalue set value='19.216667' where id=25112;
+update datasetfieldvalue set value='69.583333' where id=25120;
+update datasetfieldvalue set value='69.583333' where id=25132;
+update datasetfieldvalue set value='69.583333' where id=25133;
+update datasetfieldvalue set value='19.216667' where id=25142;
+update datasetfieldvalue set value='19.216667' where id=25154;
+update datasetfieldvalue set value='69.583333' where id=25190;
+update datasetfieldvalue set value='69.583333' where id=25194;
+update datasetfieldvalue set value='19.216667' where id=25204;
+update datasetfieldvalue set value='19.216667' where id=25211;
+update datasetfieldvalue set value='69.583333' where id=25214;
+update datasetfieldvalue set value='19.216667' where id=25225;
+update datasetfieldvalue set value='69.583333' where id=25227;
+update datasetfieldvalue set value='19.216667' where id=25250;
+update datasetfieldvalue set value='19.216667' where id=25254;
+update datasetfieldvalue set value='69.583333' where id=25269;
+update datasetfieldvalue set value='19.216667' where id=25287;
+update datasetfieldvalue set value='69.583333' where id=25290;
+update datasetfieldvalue set value='19.216667' where id=25298;
+update datasetfieldvalue set value='19.216667' where id=25300;
+update datasetfieldvalue set value='69.583333' where id=25324;
+update datasetfieldvalue set value='69.583333' where id=25329;
+update datasetfieldvalue set value='19.216667' where id=25354;
+update datasetfieldvalue set value='19.216667' where id=25357;
+update datasetfieldvalue set value='69.583333' where id=25361;
+update datasetfieldvalue set value='69.583333' where id=25376;
+update datasetfieldvalue set value='69.583333' where id=25388;
+update datasetfieldvalue set value='19.216667' where id=25399;
+update datasetfieldvalue set value='69.583333' where id=25404;
+update datasetfieldvalue set value='19.216667' where id=25406;
+update datasetfieldvalue set value='19.216667' where id=25436;
+update datasetfieldvalue set value='69.583333' where id=25438;
+update datasetfieldvalue set value='69.583333' where id=25440;
+update datasetfieldvalue set value='19.216667' where id=25445;
+update datasetfieldvalue set value='69.583333' where id=25466;
+update datasetfieldvalue set value='19.216667' where id=25477;
+update datasetfieldvalue set value='19.216667' where id=25502;
+update datasetfieldvalue set value='69.583333' where id=25503;
+update datasetfieldvalue set value='19.216667' where id=25508;
+update datasetfieldvalue set value='69.583333' where id=25511;
+update datasetfieldvalue set value='69.583333' where id=25521;
+update datasetfieldvalue set value='19.216667' where id=25523;
+update datasetfieldvalue set value='69.583333' where id=25550;
+update datasetfieldvalue set value='19.216667' where id=25566;
+update datasetfieldvalue set value='69.583333' where id=25575;
+update datasetfieldvalue set value='19.216667' where id=25580;
+update datasetfieldvalue set value='19.216667' where id=25591;
+update datasetfieldvalue set value='19.216667' where id=25594;
+update datasetfieldvalue set value='69.583333' where id=25597;
+update datasetfieldvalue set value='69.583333' where id=25617;
+update datasetfieldvalue set value='69.583333' where id=25632;
+update datasetfieldvalue set value='69.583333' where id=25652;
+update datasetfieldvalue set value='19.216667' where id=25656;
+update datasetfieldvalue set value='19.216667' where id=25662;
+update datasetfieldvalue set value='69.583333' where id=25691;
+update datasetfieldvalue set value='19.216667' where id=25693;
+update datasetfieldvalue set value='69.583333' where id=25698;
+update datasetfieldvalue set value='19.216667' where id=25706;
+update datasetfieldvalue set value='19.216667' where id=25717;
+update datasetfieldvalue set value='69.583333' where id=25725;
+update datasetfieldvalue set value='69.583333' where id=25731;
+update datasetfieldvalue set value='19.216667' where id=25755;
+update datasetfieldvalue set value='19.216667' where id=25761;
+update datasetfieldvalue set value='69.583333' where id=25776;
+update datasetfieldvalue set value='69.583333' where id=25784;
+update datasetfieldvalue set value='19.216667' where id=25798;
+update datasetfieldvalue set value='69.583333' where id=25800;
+update datasetfieldvalue set value='19.216667' where id=25811;
+update datasetfieldvalue set value='19.216667' where id=25816;
+update datasetfieldvalue set value='69.583333' where id=25821;
+update datasetfieldvalue set value='69.583333' where id=25864;
+update datasetfieldvalue set value='19.216667' where id=25881;
+update datasetfieldvalue set value='19.216667' where id=25882;
+update datasetfieldvalue set value='69.583333' where id=25883;
+update datasetfieldvalue set value='19.216667' where id=25890;
+update datasetfieldvalue set value='69.583333' where id=25903;
+update datasetfieldvalue set value='19.216667' where id=25919;
+update datasetfieldvalue set value='69.583333' where id=25921;
+update datasetfieldvalue set value='19.216667' where id=25927;
+update datasetfieldvalue set value='19.216667' where id=25938;
+update datasetfieldvalue set value='69.583333' where id=25963;
+update datasetfieldvalue set value='69.583333' where id=25964;
+update datasetfieldvalue set value='19.216667' where id=25968;
+update datasetfieldvalue set value='69.583333' where id=25984;
+update datasetfieldvalue set value='69.583333' where id=25990;
+update datasetfieldvalue set value='19.216667' where id=26008;
+update datasetfieldvalue set value='19.216667' where id=26016;
+update datasetfieldvalue set value='69.583333' where id=26021;
+update datasetfieldvalue set value='19.216667' where id=26041;
+update datasetfieldvalue set value='69.583333' where id=26044;
+update datasetfieldvalue set value='19.216667' where id=26062;
+update datasetfieldvalue set value='69.583333' where id=26063;
+update datasetfieldvalue set value='19.216667' where id=26066;
+update datasetfieldvalue set value='69.583333' where id=26087;
+update datasetfieldvalue set value='69.583333' where id=26096;
+update datasetfieldvalue set value='69.583333' where id=26124;
+update datasetfieldvalue set value='19.216667' where id=26127;
+update datasetfieldvalue set value='19.216667' where id=26135;
+update datasetfieldvalue set value='69.583333' where id=26148;
+update datasetfieldvalue set value='69.583333' where id=26153;
+update datasetfieldvalue set value='19.216667' where id=26154;
+update datasetfieldvalue set value='19.216667' where id=26158;
+update datasetfieldvalue set value='19.216667' where id=26187;
+update datasetfieldvalue set value='19.216667' where id=26188;
+update datasetfieldvalue set value='69.583333' where id=26202;
+update datasetfieldvalue set value='69.583333' where id=26208;
+update datasetfieldvalue set value='19.216667' where id=26240;
+update datasetfieldvalue set value='69.583333' where id=26249;
+update datasetfieldvalue set value='19.216667' where id=26256;
+update datasetfieldvalue set value='69.583333' where id=26261;
+update datasetfieldvalue set value='19.216667' where id=26272;
+update datasetfieldvalue set value='69.583333' where id=26285;
+update datasetfieldvalue set value='69.583333' where id=26290;
+update datasetfieldvalue set value='19.216667' where id=26301;
+update datasetfieldvalue set value='69.583333' where id=26315;
+update datasetfieldvalue set value='69.583333' where id=26323;
+update datasetfieldvalue set value='19.216667' where id=26333;
+update datasetfieldvalue set value='19.216667' where id=26344;
+update datasetfieldvalue set value='69.583333' where id=26356;
+update datasetfieldvalue set value='19.216667' where id=26363;
+update datasetfieldvalue set value='19.216667' where id=26365;
+update datasetfieldvalue set value='69.583333' where id=26367;
+update datasetfieldvalue set value='19.216667' where id=26401;
+update datasetfieldvalue set value='69.583333' where id=26403;
+update datasetfieldvalue set value='69.583333' where id=26415;
+update datasetfieldvalue set value='19.216667' where id=26428;
+update datasetfieldvalue set value='69.583333' where id=26703;
+update datasetfieldvalue set value='19.216667' where id=26718;
+update datasetfieldvalue set value='69.583333' where id=26721;
+update datasetfieldvalue set value='19.216667' where id=26733;
+update datasetfieldvalue set value='19.216667' where id=26747;
+update datasetfieldvalue set value='19.216667' where id=26753;
+update datasetfieldvalue set value='69.583333' where id=26769;
+update datasetfieldvalue set value='69.583333' where id=26775;
+update datasetfieldvalue set value='19.216667' where id=26814;
+update datasetfieldvalue set value='19.216667' where id=26822;
+update datasetfieldvalue set value='69.583333' where id=26833;
+update datasetfieldvalue set value='69.583333' where id=26841;
+update datasetfieldvalue set value='19.216667' where id=26852;
+update datasetfieldvalue set value='19.216667' where id=26856;
+update datasetfieldvalue set value='69.583333' where id=26864;
+update datasetfieldvalue set value='69.583333' where id=26880;
+update datasetfieldvalue set value='69.583333' where id=26917;
+update datasetfieldvalue set value='19.216667' where id=26921;
+update datasetfieldvalue set value='19.216667' where id=26927;
+update datasetfieldvalue set value='69.583333' where id=26930;
+update datasetfieldvalue set value='19.216667' where id=26953;
+update datasetfieldvalue set value='69.583333' where id=26957;
+update datasetfieldvalue set value='69.583333' where id=26958;
+update datasetfieldvalue set value='19.216667' where id=26972;
+update datasetfieldvalue set value='19.216667' where id=26979;
+update datasetfieldvalue set value='69.583333' where id=26993;
+update datasetfieldvalue set value='19.216667' where id=26996;
+update datasetfieldvalue set value='69.583333' where id=27010;
+update datasetfieldvalue set value='69.583333' where id=27022;
+update datasetfieldvalue set value='19.216667' where id=27040;
+update datasetfieldvalue set value='19.216667' where id=27047;
+update datasetfieldvalue set value='69.583333' where id=27054;
+update datasetfieldvalue set value='19.216667' where id=27061;
+update datasetfieldvalue set value='69.583333' where id=27067;
+update datasetfieldvalue set value='19.216667' where id=27070;
+update datasetfieldvalue set value='69.583333' where id=27071;
+update datasetfieldvalue set value='19.216667' where id=27102;
+update datasetfieldvalue set value='69.583333' where id=27107;
+update datasetfieldvalue set value='69.583333' where id=27132;
+update datasetfieldvalue set value='19.216667' where id=27135;
+update datasetfieldvalue set value='69.583333' where id=27144;
+update datasetfieldvalue set value='19.216667' where id=27150;
+update datasetfieldvalue set value='69.583333' where id=27179;
+update datasetfieldvalue set value='19.216667' where id=27184;
+update datasetfieldvalue set value='19.216667' where id=27191;
+update datasetfieldvalue set value='69.583333' where id=27199;
+update datasetfieldvalue set value='19.216667' where id=27202;
+update datasetfieldvalue set value='69.583333' where id=27214;
+update datasetfieldvalue set value='19.216667' where id=27240;
+update datasetfieldvalue set value='69.583333' where id=27245;
+update datasetfieldvalue set value='69.583333' where id=27248;
+update datasetfieldvalue set value='19.216667' where id=27264;
+update datasetfieldvalue set value='69.583333' where id=27269;
+update datasetfieldvalue set value='69.583333' where id=27271;
+update datasetfieldvalue set value='19.216667' where id=27287;
+update datasetfieldvalue set value='19.216667' where id=27305;
+update datasetfieldvalue set value='69.583333' where id=27312;
+update datasetfieldvalue set value='19.216667' where id=27326;
+update datasetfieldvalue set value='19.216667' where id=27327;
+update datasetfieldvalue set value='69.583333' where id=27333;
+update datasetfieldvalue set value='69.583333' where id=27353;
+update datasetfieldvalue set value='19.216667' where id=27360;
+update datasetfieldvalue set value='19.216667' where id=27370;
+update datasetfieldvalue set value='69.583333' where id=27389;
+update datasetfieldvalue set value='69.583333' where id=27396;
+update datasetfieldvalue set value='19.216667' where id=27398;
+update datasetfieldvalue set value='69.583333' where id=27399;
+update datasetfieldvalue set value='19.216667' where id=27405;
+update datasetfieldvalue set value='69.583333' where id=27446;
+update datasetfieldvalue set value='19.216667' where id=27455;
+update datasetfieldvalue set value='19.216667' where id=27464;
+update datasetfieldvalue set value='69.583333' where id=27475;
+update datasetfieldvalue set value='19.216667' where id=27758;
+update datasetfieldvalue set value='69.583333' where id=27765;
+update datasetfieldvalue set value='69.583333' where id=27783;
+update datasetfieldvalue set value='19.216667' where id=27789;
+update datasetfieldvalue set value='19.216667' where id=27882;
+update datasetfieldvalue set value='69.583333' where id=27888;
+update datasetfieldvalue set value='69.583333' where id=27905;
+update datasetfieldvalue set value='19.216667' where id=27915;
+update datasetfieldvalue set value='19.216667' where id=27920;
+update datasetfieldvalue set value='69.583333' where id=27926;
+update datasetfieldvalue set value='19.216667' where id=27938;
+update datasetfieldvalue set value='69.583333' where id=27950;
+update datasetfieldvalue set value='19.216667' where id=27969;
+update datasetfieldvalue set value='69.583333' where id=27971;
+update datasetfieldvalue set value='69.583333' where id=27973;
+update datasetfieldvalue set value='19.216667' where id=27986;
+update datasetfieldvalue set value='19.216667' where id=28005;
+update datasetfieldvalue set value='69.583333' where id=28014;
+update datasetfieldvalue set value='69.583333' where id=28038;
+update datasetfieldvalue set value='19.216667' where id=28039;
+update datasetfieldvalue set value='19.216667' where id=28074;
+update datasetfieldvalue set value='69.583333' where id=28084;
+update datasetfieldvalue set value='19.216667' where id=28086;
+update datasetfieldvalue set value='69.583333' where id=28087;
+update datasetfieldvalue set value='69.583333' where id=28097;
+update datasetfieldvalue set value='19.216667' where id=28116;
+update datasetfieldvalue set value='69.583333' where id=28122;
+update datasetfieldvalue set value='19.216667' where id=28124;
+update datasetfieldvalue set value='69.583333' where id=28140;
+update datasetfieldvalue set value='19.216667' where id=28145;
+update datasetfieldvalue set value='19.216667' where id=28155;
+update datasetfieldvalue set value='69.583333' where id=28161;
+update datasetfieldvalue set value='69.583333' where id=28685;
+update datasetfieldvalue set value='19.216667' where id=28698;
+update datasetfieldvalue set value='69.583333' where id=28708;
+update datasetfieldvalue set value='19.216667' where id=28709;
+update datasetfieldvalue set value='19.216667' where id=28735;
+update datasetfieldvalue set value='69.583333' where id=28737;
+update datasetfieldvalue set value='69.583333' where id=28754;
+update datasetfieldvalue set value='19.216667' where id=28760;
+update datasetfieldvalue set value='69.583333' where id=28765;
+update datasetfieldvalue set value='19.216667' where id=28778;
+update datasetfieldvalue set value='19.216667' where id=28785;
+update datasetfieldvalue set value='69.583333' where id=28800;
+update datasetfieldvalue set value='69.583333' where id=28808;
+update datasetfieldvalue set value='19.216667' where id=28818;
+update datasetfieldvalue set value='69.583333' where id=28819;
+update datasetfieldvalue set value='19.216667' where id=28842;
+update datasetfieldvalue set value='69.583333' where id=28978;
+update datasetfieldvalue set value='69.583333' where id=28991;
+update datasetfieldvalue set value='19.216667' where id=28994;
+update datasetfieldvalue set value='19.216667' where id=28996;
+update datasetfieldvalue set value='19.216667' where id=29016;
+update datasetfieldvalue set value='19.216667' where id=29029;
+update datasetfieldvalue set value='69.583333' where id=29037;
+update datasetfieldvalue set value='69.583333' where id=29047;
+update datasetfieldvalue set value='19.216667' where id=29057;
+update datasetfieldvalue set value='69.583333' where id=29067;
+update datasetfieldvalue set value='19.216667' where id=29082;
+update datasetfieldvalue set value='69.583333' where id=29094;
+update datasetfieldvalue set value='19.216667' where id=29102;
+update datasetfieldvalue set value='69.583333' where id=29104;
+update datasetfieldvalue set value='19.216667' where id=29115;
+update datasetfieldvalue set value='69.583333' where id=29136;
+update datasetfieldvalue set value='19.216667' where id=29141;
+update datasetfieldvalue set value='69.583333' where id=29146;
+update datasetfieldvalue set value='69.583333' where id=29150;
+update datasetfieldvalue set value='19.216667' where id=29178;
+update datasetfieldvalue set value='69.583333' where id=29187;
+update datasetfieldvalue set value='69.583333' where id=29196;
+update datasetfieldvalue set value='19.216667' where id=29202;
+update datasetfieldvalue set value='19.216667' where id=29210;
+update datasetfieldvalue set value='19.216667' where id=29226;
+update datasetfieldvalue set value='69.583333' where id=29230;
+update datasetfieldvalue set value='19.216667' where id=29249;
+update datasetfieldvalue set value='69.583333' where id=29255;
+update datasetfieldvalue set value='69.583333' where id=29275;
+update datasetfieldvalue set value='19.216667' where id=29283;
+update datasetfieldvalue set value='69.583333' where id=29292;
+update datasetfieldvalue set value='19.216667' where id=29298;
+update datasetfieldvalue set value='19.216667' where id=29306;
+update datasetfieldvalue set value='69.583333' where id=29310;
+update datasetfieldvalue set value='69.583333' where id=29317;
+update datasetfieldvalue set value='19.216667' where id=29346;
+update datasetfieldvalue set value='19.216667' where id=29359;
+update datasetfieldvalue set value='19.216667' where id=29363;
+update datasetfieldvalue set value='69.583333' where id=29384;
+update datasetfieldvalue set value='69.583333' where id=29385;
+update datasetfieldvalue set value='19.216667' where id=29396;
+update datasetfieldvalue set value='69.583333' where id=29409;
+update datasetfieldvalue set value='69.583333' where id=29410;
+update datasetfieldvalue set value='19.216667' where id=29430;
+update datasetfieldvalue set value='69.583333' where id=29437;
+update datasetfieldvalue set value='19.216667' where id=29467;
+update datasetfieldvalue set value='19.216667' where id=29468;
+update datasetfieldvalue set value='69.583333' where id=29469;
+update datasetfieldvalue set value='19.216667' where id=29478;
+update datasetfieldvalue set value='69.583333' where id=29483;
+update datasetfieldvalue set value='69.583333' where id=29492;
+update datasetfieldvalue set value='19.216667' where id=29510;
+update datasetfieldvalue set value='69.583333' where id=29529;
+update datasetfieldvalue set value='19.216667' where id=29542;
+update datasetfieldvalue set value='69.583333' where id=29543;
+update datasetfieldvalue set value='19.216667' where id=29544;
+update datasetfieldvalue set value='69.583333' where id=29566;
+update datasetfieldvalue set value='19.216667' where id=29570;
+update datasetfieldvalue set value='19.216667' where id=29595;
+update datasetfieldvalue set value='69.583333' where id=29598;
+update datasetfieldvalue set value='19.216667' where id=29602;
+update datasetfieldvalue set value='69.583333' where id=29605;
+update datasetfieldvalue set value='19.216667' where id=29610;
+update datasetfieldvalue set value='69.583333' where id=29620;
+update datasetfieldvalue set value='19.216667' where id=29656;
+update datasetfieldvalue set value='69.583333' where id=29657;
+update datasetfieldvalue set value='19.216667' where id=29676;
+update datasetfieldvalue set value='69.583333' where id=29678;
+update datasetfieldvalue set value='19.216667' where id=29686;
+update datasetfieldvalue set value='69.583333' where id=29694;
+update datasetfieldvalue set value='19.216667' where id=29696;
+update datasetfieldvalue set value='69.583333' where id=29708;
+update datasetfieldvalue set value='19.216667' where id=29727;
+update datasetfieldvalue set value='19.216667' where id=29747;
+update datasetfieldvalue set value='69.583333' where id=29749;
+update datasetfieldvalue set value='69.583333' where id=29756;
+update datasetfieldvalue set value='69.583333' where id=29777;
+update datasetfieldvalue set value='69.583333' where id=29783;
+update datasetfieldvalue set value='19.216667' where id=29797;
+update datasetfieldvalue set value='19.216667' where id=29806;
+update datasetfieldvalue set value='69.583333' where id=29811;
+update datasetfieldvalue set value='19.216667' where id=29826;
+update datasetfieldvalue set value='19.216667' where id=29828;
+update datasetfieldvalue set value='69.583333' where id=29850;
+update datasetfieldvalue set value='69.583333' where id=29862;
+update datasetfieldvalue set value='69.583333' where id=29866;
+update datasetfieldvalue set value='19.216667' where id=29884;
+update datasetfieldvalue set value='19.216667' where id=29892;
+update datasetfieldvalue set value='19.216667' where id=29894;
+update datasetfieldvalue set value='69.583333' where id=29905;
+update datasetfieldvalue set value='19.216667' where id=29912;
+update datasetfieldvalue set value='69.583333' where id=29920;
+update datasetfieldvalue set value='19.216667' where id=29946;
+update datasetfieldvalue set value='19.216667' where id=29958;
+update datasetfieldvalue set value='69.583333' where id=29959;
+update datasetfieldvalue set value='69.583333' where id=29964;
+update datasetfieldvalue set value='19.216667' where id=29997;
+update datasetfieldvalue set value='69.583333' where id=30002;
+update datasetfieldvalue set value='69.583333' where id=30007;
+update datasetfieldvalue set value='19.216667' where id=30011;
+update datasetfieldvalue set value='69.583333' where id=30021;
+update datasetfieldvalue set value='69.583333' where id=30035;
+update datasetfieldvalue set value='19.216667' where id=30058;
+update datasetfieldvalue set value='19.216667' where id=30061;
+update datasetfieldvalue set value='69.583333' where id=30062;
+update datasetfieldvalue set value='69.583333' where id=30073;
+update datasetfieldvalue set value='19.216667' where id=30084;
+update datasetfieldvalue set value='19.216667' where id=30095;
+update datasetfieldvalue set value='69.583333' where id=30106;
+update datasetfieldvalue set value='19.216667' where id=30118;
+update datasetfieldvalue set value='69.583333' where id=30120;
+update datasetfieldvalue set value='19.216667' where id=30142;
+update datasetfieldvalue set value='69.583333' where id=30151;
+update datasetfieldvalue set value='19.216667' where id=30180;
+update datasetfieldvalue set value='69.583333' where id=30182;
+update datasetfieldvalue set value='19.216667' where id=30187;
+update datasetfieldvalue set value='19.216667' where id=30197;
+update datasetfieldvalue set value='19.216667' where id=30199;
+update datasetfieldvalue set value='69.583333' where id=30203;
+update datasetfieldvalue set value='69.583333' where id=30229;
+update datasetfieldvalue set value='19.216667' where id=30239;
+update datasetfieldvalue set value='69.583333' where id=30244;
+update datasetfieldvalue set value='19.216667' where id=30247;
+update datasetfieldvalue set value='69.583333' where id=30253;
+update datasetfieldvalue set value='69.583333' where id=30279;
+update datasetfieldvalue set value='69.583333' where id=30288;
+update datasetfieldvalue set value='19.216667' where id=30303;
+update datasetfieldvalue set value='19.216667' where id=30309;
+update datasetfieldvalue set value='19.216667' where id=30316;
+update datasetfieldvalue set value='19.216667' where id=30326;
+update datasetfieldvalue set value='69.583333' where id=30347;
+update datasetfieldvalue set value='69.583333' where id=30349;
+update datasetfieldvalue set value='69.583333' where id=30361;
+update datasetfieldvalue set value='19.216667' where id=30370;
+update datasetfieldvalue set value='19.216667' where id=30372;
+update datasetfieldvalue set value='69.583333' where id=30396;
+update datasetfieldvalue set value='69.583333' where id=30400;
+update datasetfieldvalue set value='19.216667' where id=30433;
+update datasetfieldvalue set value='69.583333' where id=30437;
+update datasetfieldvalue set value='19.216667' where id=30438;
+update datasetfieldvalue set value='19.216667' where id=30500;
+update datasetfieldvalue set value='69.583333' where id=30509;
+update datasetfieldvalue set value='19.216667' where id=30511;
+update datasetfieldvalue set value='69.583333' where id=30525;
+update datasetfieldvalue set value='69.583333' where id=30551;
+update datasetfieldvalue set value='19.216667' where id=30555;
+update datasetfieldvalue set value='19.216667' where id=30557;
+update datasetfieldvalue set value='69.583333' where id=30559;
+update datasetfieldvalue set value='69.583333' where id=30577;
+update datasetfieldvalue set value='19.216667' where id=30591;
+update datasetfieldvalue set value='69.583333' where id=30592;
+update datasetfieldvalue set value='19.216667' where id=30607;
+update datasetfieldvalue set value='19.216667' where id=30622;
+update datasetfieldvalue set value='69.583333' where id=30643;
+update datasetfieldvalue set value='69.583333' where id=30647;
+update datasetfieldvalue set value='19.216667' where id=30653;
+update datasetfieldvalue set value='69.583333' where id=30665;
+update datasetfieldvalue set value='19.216667' where id=30670;
+update datasetfieldvalue set value='19.216667' where id=30675;
+update datasetfieldvalue set value='69.583333' where id=30692;
+update datasetfieldvalue set value='69.583333' where id=30710;
+update datasetfieldvalue set value='69.583333' where id=30719;
+update datasetfieldvalue set value='19.216667' where id=30724;
+update datasetfieldvalue set value='19.216667' where id=30734;
+update datasetfieldvalue set value='19.216667' where id=30749;
+update datasetfieldvalue set value='69.583333' where id=30764;
+update datasetfieldvalue set value='19.216667' where id=30769;
+update datasetfieldvalue set value='69.583333' where id=30777;
+update datasetfieldvalue set value='19.216667' where id=30794;
+update datasetfieldvalue set value='19.216667' where id=30800;
+update datasetfieldvalue set value='69.583333' where id=30807;
+update datasetfieldvalue set value='69.583333' where id=30822;
+update datasetfieldvalue set value='69.583333' where id=30829;
+update datasetfieldvalue set value='19.216667' where id=30830;
+update datasetfieldvalue set value='69.583333' where id=30852;
+update datasetfieldvalue set value='19.216667' where id=30861;
+update datasetfieldvalue set value='19.216667' where id=30889;
+update datasetfieldvalue set value='19.216667' where id=30898;
+update datasetfieldvalue set value='69.583333' where id=30901;
+update datasetfieldvalue set value='69.583333' where id=30909;
+update datasetfieldvalue set value='19.216667' where id=30918;
+update datasetfieldvalue set value='19.216667' where id=30925;
+update datasetfieldvalue set value='69.583333' where id=30936;
+update datasetfieldvalue set value='69.583333' where id=30945;
+update datasetfieldvalue set value='19.216667' where id=30956;
+update datasetfieldvalue set value='19.216667' where id=30961;
+update datasetfieldvalue set value='69.583333' where id=30976;
+update datasetfieldvalue set value='69.583333' where id=30986;
+update datasetfieldvalue set value='19.216667' where id=31004;
+update datasetfieldvalue set value='19.216667' where id=31016;
+update datasetfieldvalue set value='69.583333' where id=31028;
+update datasetfieldvalue set value='69.583333' where id=31029;
+update datasetfieldvalue set value='69.583333' where id=31050;
+update datasetfieldvalue set value='19.216667' where id=31067;
+update datasetfieldvalue set value='19.216667' where id=31074;
+update datasetfieldvalue set value='69.583333' where id=31076;
+update datasetfieldvalue set value='19.216667' where id=31094;
+update datasetfieldvalue set value='69.583333' where id=31098;
+update datasetfieldvalue set value='69.583333' where id=31103;
+update datasetfieldvalue set value='19.216667' where id=31110;
+update datasetfieldvalue set value='19.216667' where id=31133;
+update datasetfieldvalue set value='19.216667' where id=31137;
+update datasetfieldvalue set value='69.583333' where id=31147;
+update datasetfieldvalue set value='69.583333' where id=31156;
+update datasetfieldvalue set value='19.216667' where id=31162;
+update datasetfieldvalue set value='19.216667' where id=31171;
+update datasetfieldvalue set value='69.583333' where id=31186;
+update datasetfieldvalue set value='69.583333' where id=31197;
+update datasetfieldvalue set value='69.583333' where id=31207;
+update datasetfieldvalue set value='69.583333' where id=31216;
+update datasetfieldvalue set value='19.216667' where id=31224;
+update datasetfieldvalue set value='19.216667' where id=31233;
+update datasetfieldvalue set value='19.216667' where id=31252;
+update datasetfieldvalue set value='69.583333' where id=31265;
+update datasetfieldvalue set value='69.583333' where id=31283;
+update datasetfieldvalue set value='19.216667' where id=31287;
+update datasetfieldvalue set value='69.583333' where id=31303;
+update datasetfieldvalue set value='19.216667' where id=31304;
+update datasetfieldvalue set value='69.583333' where id=31311;
+update datasetfieldvalue set value='19.216667' where id=31328;
+update datasetfieldvalue set value='69.583333' where id=31337;
+update datasetfieldvalue set value='19.216667' where id=31354;
+update datasetfieldvalue set value='19.216667' where id=31355;
+update datasetfieldvalue set value='69.583333' where id=31358;
+update datasetfieldvalue set value='19.216667' where id=31380;
+update datasetfieldvalue set value='69.583333' where id=31383;
+update datasetfieldvalue set value='19.216667' where id=31389;
+update datasetfieldvalue set value='69.583333' where id=31404;
+update datasetfieldvalue set value='69.583333' where id=31433;
+update datasetfieldvalue set value='69.583333' where id=31439;
+update datasetfieldvalue set value='19.216667' where id=31441;
+update datasetfieldvalue set value='19.216667' where id=31454;
+update datasetfieldvalue set value='69.583333' where id=31463;
+update datasetfieldvalue set value='69.583333' where id=31465;
+update datasetfieldvalue set value='19.216667' where id=31474;
+update datasetfieldvalue set value='19.216667' where id=31497;
+update datasetfieldvalue set value='69.583333' where id=31515;
+update datasetfieldvalue set value='69.583333' where id=31524;
+update datasetfieldvalue set value='19.216667' where id=31526;
+update datasetfieldvalue set value='19.216667' where id=31528;
+update datasetfieldvalue set value='19.216667' where id=31544;
+update datasetfieldvalue set value='69.583333' where id=31569;
+update datasetfieldvalue set value='19.216667' where id=31572;
+update datasetfieldvalue set value='69.583333' where id=31581;
+update datasetfieldvalue set value='19.216667' where id=31594;
+update datasetfieldvalue set value='19.216667' where id=31612;
+update datasetfieldvalue set value='69.583333' where id=31618;
+update datasetfieldvalue set value='69.583333' where id=31622;
+update datasetfieldvalue set value='69.583333' where id=31630;
+update datasetfieldvalue set value='69.583333' where id=31653;
+update datasetfieldvalue set value='19.216667' where id=31657;
+update datasetfieldvalue set value='19.216667' where id=31660;
+update datasetfieldvalue set value='19.216667' where id=31668;
+update datasetfieldvalue set value='69.583333' where id=31670;
+update datasetfieldvalue set value='69.583333' where id=31683;
+update datasetfieldvalue set value='19.216667' where id=31691;
+update datasetfieldvalue set value='19.216667' where id=31718;
+update datasetfieldvalue set value='69.583333' where id=31721;
+update datasetfieldvalue set value='19.216667' where id=31722;
+update datasetfieldvalue set value='69.583333' where id=31730;
+update datasetfieldvalue set value='69.583333' where id=31776;
+update datasetfieldvalue set value='19.216667' where id=31783;
+update datasetfieldvalue set value='69.583333' where id=31787;
+update datasetfieldvalue set value='19.216667' where id=31788;
+update datasetfieldvalue set value='69.583333' where id=31792;
+update datasetfieldvalue set value='69.583333' where id=31799;
+update datasetfieldvalue set value='19.216667' where id=31800;
+update datasetfieldvalue set value='19.216667' where id=31814;
+update datasetfieldvalue set value='19.216667' where id=31834;
+update datasetfieldvalue set value='69.583333' where id=31840;
+update datasetfieldvalue set value='69.583333' where id=31854;
+update datasetfieldvalue set value='19.216667' where id=31864;
+update datasetfieldvalue set value='19.216667' where id=31898;
+update datasetfieldvalue set value='19.216667' where id=31900;
+update datasetfieldvalue set value='69.583333' where id=31901;
+update datasetfieldvalue set value='69.583333' where id=31909;
+update datasetfieldvalue set value='19.216667' where id=31922;
+update datasetfieldvalue set value='19.216667' where id=31924;
+update datasetfieldvalue set value='69.583333' where id=31937;
+update datasetfieldvalue set value='69.583333' where id=31953;
+update datasetfieldvalue set value='19.216667' where id=31973;
+update datasetfieldvalue set value='69.583333' where id=31984;
+update datasetfieldvalue set value='69.583333' where id=31988;
+update datasetfieldvalue set value='19.216667' where id=31998;
+update datasetfieldvalue set value='19.216667' where id=32007;
+update datasetfieldvalue set value='69.583333' where id=32011;
+update datasetfieldvalue set value='69.583333' where id=32016;
+update datasetfieldvalue set value='19.216667' where id=32039;
+update datasetfieldvalue set value='19.216667' where id=32052;
+update datasetfieldvalue set value='19.216667' where id=32071;
+update datasetfieldvalue set value='69.583333' where id=32074;
+update datasetfieldvalue set value='69.583333' where id=32075;
+update datasetfieldvalue set value='69.583333' where id=32087;
+update datasetfieldvalue set value='69.583333' where id=32090;
+update datasetfieldvalue set value='19.216667' where id=32097;
+update datasetfieldvalue set value='19.216667' where id=32102;
+update datasetfieldvalue set value='19.216667' where id=32132;
+update datasetfieldvalue set value='69.583333' where id=32136;
+update datasetfieldvalue set value='69.583333' where id=32148;
+update datasetfieldvalue set value='19.216667' where id=32154;
+update datasetfieldvalue set value='69.583333' where id=32180;
+update datasetfieldvalue set value='19.216667' where id=32190;
+update datasetfieldvalue set value='69.583333' where id=32191;
+update datasetfieldvalue set value='19.216667' where id=32202;
+update datasetfieldvalue set value='19.216667' where id=32215;
+update datasetfieldvalue set value='19.216667' where id=32222;
+update datasetfieldvalue set value='69.583333' where id=32225;
+update datasetfieldvalue set value='69.583333' where id=32242;
+update datasetfieldvalue set value='69.583333' where id=32277;
+update datasetfieldvalue set value='19.216667' where id=32279;
+update datasetfieldvalue set value='69.583333' where id=32289;
+update datasetfieldvalue set value='19.216667' where id=32290;
+update datasetfieldvalue set value='19.216667' where id=32296;
+update datasetfieldvalue set value='69.583333' where id=32304;
+update datasetfieldvalue set value='69.583333' where id=32322;
+update datasetfieldvalue set value='19.216667' where id=32326;
+update datasetfieldvalue set value='69.583333' where id=32350;
+update datasetfieldvalue set value='69.583333' where id=32366;
+update datasetfieldvalue set value='19.216667' where id=32378;
+update datasetfieldvalue set value='19.216667' where id=32379;
+update datasetfieldvalue set value='19.216667' where id=32393;
+update datasetfieldvalue set value='19.216667' where id=32403;
+update datasetfieldvalue set value='69.583333' where id=32406;
+update datasetfieldvalue set value='69.583333' where id=32409;
+update datasetfieldvalue set value='69.583333' where id=32425;
+update datasetfieldvalue set value='19.216667' where id=32436;
+update datasetfieldvalue set value='19.216667' where id=32437;
+update datasetfieldvalue set value='69.583333' where id=32445;
+update datasetfieldvalue set value='69.583333' where id=32469;
+update datasetfieldvalue set value='19.216667' where id=32487;
+update datasetfieldvalue set value='19.216667' where id=32495;
+update datasetfieldvalue set value='69.583333' where id=32497;
+update datasetfieldvalue set value='69.583333' where id=32515;
+update datasetfieldvalue set value='69.583333' where id=32518;
+update datasetfieldvalue set value='19.216667' where id=32521;
+update datasetfieldvalue set value='19.216667' where id=32537;
+update datasetfieldvalue set value='19.216667' where id=32559;
+update datasetfieldvalue set value='69.583333' where id=32565;
+update datasetfieldvalue set value='69.583333' where id=32570;
+update datasetfieldvalue set value='19.216667' where id=32582;
+update datasetfieldvalue set value='69.583333' where id=32655;
+update datasetfieldvalue set value='19.216667' where id=32660;
+update datasetfieldvalue set value='69.583333' where id=32665;
+update datasetfieldvalue set value='19.216667' where id=32668;
+update datasetfieldvalue set value='69.583333' where id=32685;
+update datasetfieldvalue set value='19.216667' where id=32700;
+update datasetfieldvalue set value='19.216667' where id=32701;
+update datasetfieldvalue set value='69.583333' where id=32710;
+update datasetfieldvalue set value='69.583333' where id=32716;
+update datasetfieldvalue set value='19.216667' where id=32727;
+update datasetfieldvalue set value='19.216667' where id=32738;
+update datasetfieldvalue set value='69.583333' where id=32744;
+update datasetfieldvalue set value='69.583333' where id=32759;
+update datasetfieldvalue set value='69.583333' where id=32768;
+update datasetfieldvalue set value='19.216667' where id=32788;
+update datasetfieldvalue set value='19.216667' where id=32791;
+update datasetfieldvalue set value='19.216667' where id=32814;
+update datasetfieldvalue set value='19.216667' where id=32829;
+update datasetfieldvalue set value='69.583333' where id=32838;
+update datasetfieldvalue set value='69.583333' where id=32839;
+update datasetfieldvalue set value='19.216667' where id=32848;
+update datasetfieldvalue set value='69.583333' where id=32858;
+update datasetfieldvalue set value='69.583333' where id=32859;
+update datasetfieldvalue set value='19.216667' where id=32872;
+update datasetfieldvalue set value='19.216667' where id=32893;
+update datasetfieldvalue set value='19.216667' where id=32901;
+update datasetfieldvalue set value='69.583333' where id=32914;
+update datasetfieldvalue set value='69.583333' where id=32917;
+update datasetfieldvalue set value='19.216667' where id=32928;
+update datasetfieldvalue set value='69.583333' where id=32932;
+update datasetfieldvalue set value='19.216667' where id=32962;
+update datasetfieldvalue set value='69.583333' where id=32965;
+update datasetfieldvalue set value='69.583333' where id=32977;
+update datasetfieldvalue set value='19.216667' where id=32982;
+update datasetfieldvalue set value='69.583333' where id=32999;
+update datasetfieldvalue set value='19.216667' where id=33007;
+update datasetfieldvalue set value='69.583333' where id=33016;
+update datasetfieldvalue set value='19.216667' where id=33019;
+update datasetfieldvalue set value='69.583333' where id=33021;
+update datasetfieldvalue set value='19.216667' where id=33049;
+update datasetfieldvalue set value='-82.0' where id=33052;
+update datasetfieldvalue set value='-4.0' where id=33053;
+update datasetfieldvalue set value='-79.0' where id=33054;
+update datasetfieldvalue set value='-2.0' where id=33055;
+update datasetfieldvalue set value='69.583333' where id=33063;
+update datasetfieldvalue set value='19.216667' where id=33066;
+update datasetfieldvalue set value='69.583333' where id=33069;
+update datasetfieldvalue set value='19.216667' where id=33096;
+update datasetfieldvalue set value='69.583333' where id=33107;
+update datasetfieldvalue set value='69.583333' where id=33115;
+update datasetfieldvalue set value='19.216667' where id=33126;
+update datasetfieldvalue set value='19.216667' where id=33142;
+update datasetfieldvalue set value='19.216667' where id=33153;
+update datasetfieldvalue set value='69.583333' where id=33160;
+update datasetfieldvalue set value='19.216667' where id=33168;
+update datasetfieldvalue set value='69.583333' where id=33177;
+update datasetfieldvalue set value='69.583333' where id=33196;
+update datasetfieldvalue set value='19.216667' where id=33201;
+update datasetfieldvalue set value='69.583333' where id=33204;
+update datasetfieldvalue set value='19.216667' where id=33209;
+update datasetfieldvalue set value='19.216667' where id=33247;
+update datasetfieldvalue set value='69.583333' where id=33253;
+update datasetfieldvalue set value='69.583333' where id=33256;
+update datasetfieldvalue set value='19.216667' where id=33270;
+update datasetfieldvalue set value='19.216667' where id=33272;
+update datasetfieldvalue set value='69.583333' where id=33283;
+update datasetfieldvalue set value='19.216667' where id=33285;
+update datasetfieldvalue set value='69.583333' where id=33302;
+update datasetfieldvalue set value='69.583333' where id=33328;
+update datasetfieldvalue set value='19.216667' where id=33343;
+update datasetfieldvalue set value='19.216667' where id=33345;
+update datasetfieldvalue set value='69.583333' where id=33353;
+update datasetfieldvalue set value='69.583333' where id=33364;
+update datasetfieldvalue set value='19.216667' where id=33372;
+update datasetfieldvalue set value='19.216667' where id=33390;
+update datasetfieldvalue set value='69.583333' where id=33396;
+update datasetfieldvalue set value='69.583333' where id=33397;
+update datasetfieldvalue set value='19.216667' where id=33398;
+update datasetfieldvalue set value='19.216667' where id=33423;
+update datasetfieldvalue set value='69.583333' where id=33425;
+update datasetfieldvalue set value='19.216667' where id=33439;
+update datasetfieldvalue set value='19.216667' where id=33444;
+update datasetfieldvalue set value='69.583333' where id=33447;
+update datasetfieldvalue set value='69.583333' where id=33477;
+update datasetfieldvalue set value='69.583333' where id=33486;
+update datasetfieldvalue set value='19.216667' where id=33491;
+update datasetfieldvalue set value='19.216667' where id=33501;
+update datasetfieldvalue set value='69.583333' where id=33511;
+update datasetfieldvalue set value='69.583333' where id=33530;
+update datasetfieldvalue set value='19.216667' where id=33532;
+update datasetfieldvalue set value='69.583333' where id=33534;
+update datasetfieldvalue set value='19.216667' where id=33560;
+update datasetfieldvalue set value='69.583333' where id=33579;
+update datasetfieldvalue set value='69.583333' where id=33595;
+update datasetfieldvalue set value='19.216667' where id=33598;
+update datasetfieldvalue set value='19.216667' where id=33601;
+update datasetfieldvalue set value='69.583333' where id=33612;
+update datasetfieldvalue set value='19.216667' where id=33620;
+update datasetfieldvalue set value='69.583333' where id=33636;
+update datasetfieldvalue set value='19.216667' where id=33643;
+update datasetfieldvalue set value='19.216667' where id=33649;
+update datasetfieldvalue set value='69.583333' where id=33670;
+update datasetfieldvalue set value='19.216667' where id=33680;
+update datasetfieldvalue set value='69.583333' where id=33686;
+update datasetfieldvalue set value='69.583333' where id=33695;
+update datasetfieldvalue set value='19.216667' where id=33696;
+update datasetfieldvalue set value='19.216667' where id=33718;
+update datasetfieldvalue set value='69.583333' where id=33732;
+update datasetfieldvalue set value='69.583333' where id=33735;
+update datasetfieldvalue set value='19.216667' where id=33739;
+update datasetfieldvalue set value='19.216667' where id=33755;
+update datasetfieldvalue set value='69.583333' where id=33774;
+update datasetfieldvalue set value='69.583333' where id=33777;
+update datasetfieldvalue set value='19.216667' where id=33789;
+update datasetfieldvalue set value='19.216667' where id=33806;
+update datasetfieldvalue set value='69.583333' where id=33813;
+update datasetfieldvalue set value='19.216667' where id=33831;
+update datasetfieldvalue set value='69.583333' where id=33839;
+update datasetfieldvalue set value='19.216667' where id=33850;
+update datasetfieldvalue set value='69.583333' where id=33858;
+update datasetfieldvalue set value='69.583333' where id=33872;
+update datasetfieldvalue set value='19.216667' where id=33877;
+update datasetfieldvalue set value='69.583333' where id=33880;
+update datasetfieldvalue set value='19.216667' where id=33891;
+update datasetfieldvalue set value='19.216667' where id=33914;
+update datasetfieldvalue set value='19.216667' where id=33916;
+update datasetfieldvalue set value='69.583333' where id=33922;
+update datasetfieldvalue set value='69.583333' where id=33935;
+update datasetfieldvalue set value='69.583333' where id=33955;
+update datasetfieldvalue set value='69.583333' where id=33956;
+update datasetfieldvalue set value='19.216667' where id=33958;
+update datasetfieldvalue set value='19.216667' where id=33967;
+update datasetfieldvalue set value='69.583333' where id=33989;
+update datasetfieldvalue set value='19.216667' where id=33997;
+update datasetfieldvalue set value='19.216667' where id=34004;
+update datasetfieldvalue set value='69.583333' where id=34024;
+update datasetfieldvalue set value='69.583333' where id=34040;
+update datasetfieldvalue set value='19.216667' where id=34045;
+update datasetfieldvalue set value='69.583333' where id=34059;
+update datasetfieldvalue set value='19.216667' where id=34064;
+update datasetfieldvalue set value='19.216667' where id=34070;
+update datasetfieldvalue set value='19.216667' where id=34080;
+update datasetfieldvalue set value='69.583333' where id=34092;
+update datasetfieldvalue set value='69.583333' where id=34095;
+update datasetfieldvalue set value='19.216667' where id=34129;
+update datasetfieldvalue set value='19.216667' where id=34135;
+update datasetfieldvalue set value='69.583333' where id=34143;
+update datasetfieldvalue set value='69.583333' where id=34149;
+update datasetfieldvalue set value='19.216667' where id=34161;
+update datasetfieldvalue set value='19.216667' where id=34162;
+update datasetfieldvalue set value='69.583333' where id=34172;
+update datasetfieldvalue set value='69.583333' where id=34190;
+update datasetfieldvalue set value='69.583333' where id=34198;
+update datasetfieldvalue set value='19.216667' where id=34225;
+update datasetfieldvalue set value='69.583333' where id=34226;
+update datasetfieldvalue set value='19.216667' where id=34236;
+update datasetfieldvalue set value='19.216667' where id=34238;
+update datasetfieldvalue set value='19.216667' where id=34244;
+update datasetfieldvalue set value='69.583333' where id=34265;
+update datasetfieldvalue set value='69.583333' where id=34268;
+update datasetfieldvalue set value='69.583333' where id=34296;
+update datasetfieldvalue set value='19.216667' where id=34301;
+update datasetfieldvalue set value='19.216667' where id=34311;
+update datasetfieldvalue set value='69.583333' where id=34312;
+update datasetfieldvalue set value='69.583333' where id=34322;
+update datasetfieldvalue set value='19.216667' where id=34326;
+update datasetfieldvalue set value='69.583333' where id=34348;
+update datasetfieldvalue set value='19.216667' where id=34351;
+update datasetfieldvalue set value='69.583333' where id=34367;
+update datasetfieldvalue set value='19.216667' where id=34372;
+update datasetfieldvalue set value='69.583333' where id=34381;
+update datasetfieldvalue set value='19.216667' where id=34389;
+update datasetfieldvalue set value='19.216667' where id=34408;
+update datasetfieldvalue set value='69.583333' where id=34418;
+update datasetfieldvalue set value='19.216667' where id=34425;
+update datasetfieldvalue set value='69.583333' where id=34429;
+update datasetfieldvalue set value='19.216667' where id=34451;
+update datasetfieldvalue set value='19.216667' where id=34455;
+update datasetfieldvalue set value='69.583333' where id=34467;
+update datasetfieldvalue set value='69.583333' where id=34470;
+update datasetfieldvalue set value='19.216667' where id=34497;
+update datasetfieldvalue set value='69.583333' where id=34507;
+update datasetfieldvalue set value='19.216667' where id=34519;
+update datasetfieldvalue set value='69.583333' where id=34530;
+update datasetfieldvalue set value='69.583333' where id=34541;
+update datasetfieldvalue set value='69.583333' where id=34547;
+update datasetfieldvalue set value='19.216667' where id=34556;
+update datasetfieldvalue set value='19.216667' where id=34562;
+update datasetfieldvalue set value='69.583333' where id=34577;
+update datasetfieldvalue set value='19.216667' where id=34586;
+update datasetfieldvalue set value='69.583333' where id=34601;
+update datasetfieldvalue set value='19.216667' where id=34610;
+update datasetfieldvalue set value='69.583333' where id=34622;
+update datasetfieldvalue set value='19.216667' where id=34634;
+update datasetfieldvalue set value='19.216667' where id=34644;
+update datasetfieldvalue set value='69.583333' where id=34645;
+update datasetfieldvalue set value='69.583333' where id=34660;
+update datasetfieldvalue set value='19.216667' where id=34661;
+update datasetfieldvalue set value='69.583333' where id=34664;
+update datasetfieldvalue set value='19.216667' where id=34677;
+update datasetfieldvalue set value='69.583333' where id=34703;
+update datasetfieldvalue set value='69.583333' where id=34706;
+update datasetfieldvalue set value='19.216667' where id=34730;
+update datasetfieldvalue set value='19.216667' where id=34738;
+update datasetfieldvalue set value='69.583333' where id=34761;
+update datasetfieldvalue set value='69.583333' where id=34765;
+update datasetfieldvalue set value='19.216667' where id=34774;
+update datasetfieldvalue set value='19.216667' where id=34777;
+update datasetfieldvalue set value='69.583333' where id=34791;
+update datasetfieldvalue set value='19.216667' where id=34792;
+update datasetfieldvalue set value='69.583333' where id=34804;
+update datasetfieldvalue set value='19.216667' where id=34807;
+update datasetfieldvalue set value='19.216667' where id=34836;
+update datasetfieldvalue set value='69.583333' where id=34845;
+update datasetfieldvalue set value='69.583333' where id=34861;
+update datasetfieldvalue set value='19.216667' where id=34864;
+update datasetfieldvalue set value='19.216667' where id=34871;
+update datasetfieldvalue set value='69.583333' where id=34879;
+update datasetfieldvalue set value='19.216667' where id=34891;
+update datasetfieldvalue set value='69.583333' where id=34903;
+update datasetfieldvalue set value='69.583333' where id=34919;
+update datasetfieldvalue set value='19.216667' where id=34934;
+update datasetfieldvalue set value='19.216667' where id=34941;
+update datasetfieldvalue set value='69.583333' where id=34944;
+update datasetfieldvalue set value='69.583333' where id=34959;
+update datasetfieldvalue set value='69.583333' where id=34962;
+update datasetfieldvalue set value='19.216667' where id=34969;
+update datasetfieldvalue set value='19.216667' where id=34992;
+update datasetfieldvalue set value='19.216667' where id=34993;
+update datasetfieldvalue set value='19.216667' where id=34995;
+update datasetfieldvalue set value='69.583333' where id=35002;
+update datasetfieldvalue set value='69.583333' where id=35032;
+update datasetfieldvalue set value='19.216667' where id=35038;
+update datasetfieldvalue set value='69.583333' where id=35055;
+update datasetfieldvalue set value='69.583333' where id=35062;
+update datasetfieldvalue set value='19.216667' where id=35063;
+update datasetfieldvalue set value='69.583333' where id=35078;
+update datasetfieldvalue set value='19.216667' where id=35083;
+update datasetfieldvalue set value='19.216667' where id=35088;
+update datasetfieldvalue set value='69.583333' where id=35096;
+update datasetfieldvalue set value='69.583333' where id=35145;
+update datasetfieldvalue set value='69.583333' where id=35147;
+update datasetfieldvalue set value='19.216667' where id=35149;
+update datasetfieldvalue set value='19.216667' where id=35156;
+update datasetfieldvalue set value='19.216667' where id=35173;
+update datasetfieldvalue set value='69.583333' where id=35193;
+update datasetfieldvalue set value='69.583333' where id=35195;
+update datasetfieldvalue set value='19.216667' where id=35202;
+update datasetfieldvalue set value='69.583333' where id=35207;
+update datasetfieldvalue set value='19.216667' where id=35212;
+update datasetfieldvalue set value='69.583333' where id=35214;
+update datasetfieldvalue set value='19.216667' where id=35235;
+update datasetfieldvalue set value='19.216667' where id=35267;
+update datasetfieldvalue set value='69.583333' where id=35270;
+update datasetfieldvalue set value='19.216667' where id=35278;
+update datasetfieldvalue set value='69.583333' where id=35286;
+update datasetfieldvalue set value='19.216667' where id=35292;
+update datasetfieldvalue set value='69.583333' where id=35301;
+update datasetfieldvalue set value='19.216667' where id=35321;
+update datasetfieldvalue set value='69.583333' where id=35328;
+update datasetfieldvalue set value='69.583333' where id=35340;
+update datasetfieldvalue set value='19.216667' where id=35344;
+update datasetfieldvalue set value='69.583333' where id=35350;
+update datasetfieldvalue set value='19.216667' where id=35352;
+update datasetfieldvalue set value='19.216667' where id=35380;
+update datasetfieldvalue set value='69.583333' where id=35388;
+update datasetfieldvalue set value='19.216667' where id=35392;
+update datasetfieldvalue set value='69.583333' where id=35404;
+update datasetfieldvalue set value='69.583333' where id=35425;
+update datasetfieldvalue set value='19.216667' where id=35427;
+update datasetfieldvalue set value='69.583333' where id=35431;
+update datasetfieldvalue set value='19.216667' where id=35433;
+update datasetfieldvalue set value='69.583333' where id=35474;
+update datasetfieldvalue set value='19.216667' where id=35476;
+update datasetfieldvalue set value='69.583333' where id=35492;
+update datasetfieldvalue set value='19.216667' where id=35493;
+update datasetfieldvalue set value='69.583333' where id=35499;
+update datasetfieldvalue set value='69.583333' where id=35509;
+update datasetfieldvalue set value='19.216667' where id=35529;
+update datasetfieldvalue set value='19.216667' where id=35533;
+update datasetfieldvalue set value='19.216667' where id=35546;
+update datasetfieldvalue set value='69.583333' where id=35566;
+update datasetfieldvalue set value='19.216667' where id=35570;
+update datasetfieldvalue set value='69.583333' where id=35577;
+update datasetfieldvalue set value='19.216667' where id=35598;
+update datasetfieldvalue set value='69.583333' where id=35601;
+update datasetfieldvalue set value='69.583333' where id=35602;
+update datasetfieldvalue set value='19.216667' where id=35603;
+update datasetfieldvalue set value='19.216667' where id=35627;
+update datasetfieldvalue set value='69.583333' where id=35631;
+update datasetfieldvalue set value='69.583333' where id=35643;
+update datasetfieldvalue set value='19.216667' where id=35650;
+update datasetfieldvalue set value='19.216667' where id=35666;
+update datasetfieldvalue set value='69.583333' where id=35696;
+update datasetfieldvalue set value='19.216667' where id=35700;
+update datasetfieldvalue set value='69.583333' where id=35704;
+update datasetfieldvalue set value='19.216667' where id=35711;
+update datasetfieldvalue set value='19.216667' where id=35732;
+update datasetfieldvalue set value='69.583333' where id=35737;
+update datasetfieldvalue set value='69.583333' where id=35739;
+update datasetfieldvalue set value='19.216667' where id=35757;
+update datasetfieldvalue set value='69.583333' where id=35765;
+update datasetfieldvalue set value='69.583333' where id=35768;
+update datasetfieldvalue set value='19.216667' where id=35771;
+update datasetfieldvalue set value='19.216667' where id=35802;
+update datasetfieldvalue set value='19.216667' where id=35803;
+update datasetfieldvalue set value='69.583333' where id=35805;
+update datasetfieldvalue set value='69.583333' where id=35823;
+update datasetfieldvalue set value='69.583333' where id=35838;
+update datasetfieldvalue set value='19.216667' where id=35860;
+update datasetfieldvalue set value='19.216667' where id=35862;
+update datasetfieldvalue set value='69.583333' where id=35869;
+update datasetfieldvalue set value='19.216667' where id=35876;
+update datasetfieldvalue set value='69.583333' where id=35906;
+update datasetfieldvalue set value='69.583333' where id=35910;
+update datasetfieldvalue set value='19.216667' where id=35913;
+update datasetfieldvalue set value='19.216667' where id=35924;
+update datasetfieldvalue set value='69.583333' where id=35931;
+update datasetfieldvalue set value='19.216667' where id=35933;
+update datasetfieldvalue set value='69.583333' where id=35947;
+update datasetfieldvalue set value='69.583333' where id=35961;
+update datasetfieldvalue set value='19.216667' where id=35967;
+update datasetfieldvalue set value='19.216667' where id=35987;
+update datasetfieldvalue set value='69.583333' where id=35993;
+update datasetfieldvalue set value='69.583333' where id=36012;
+update datasetfieldvalue set value='19.216667' where id=36030;
+update datasetfieldvalue set value='69.583333' where id=36033;
+update datasetfieldvalue set value='19.216667' where id=36042;
+update datasetfieldvalue set value='19.216667' where id=36048;
+update datasetfieldvalue set value='69.583333' where id=36052;
+update datasetfieldvalue set value='69.583333' where id=36070;
+update datasetfieldvalue set value='19.216667' where id=36073;
+update datasetfieldvalue set value='69.583333' where id=36086;
+update datasetfieldvalue set value='19.216667' where id=36101;
+update datasetfieldvalue set value='69.583333' where id=36105;
+update datasetfieldvalue set value='19.216667' where id=36110;
+update datasetfieldvalue set value='69.583333' where id=36151;
+update datasetfieldvalue set value='19.216667' where id=36152;
+update datasetfieldvalue set value='19.216667' where id=36158;
+update datasetfieldvalue set value='69.583333' where id=36159;
+update datasetfieldvalue set value='19.216667' where id=36170;
+update datasetfieldvalue set value='19.216667' where id=36190;
+update datasetfieldvalue set value='69.583333' where id=36197;
+update datasetfieldvalue set value='69.583333' where id=36203;
+update datasetfieldvalue set value='19.216667' where id=36219;
+update datasetfieldvalue set value='69.583333' where id=36221;
+update datasetfieldvalue set value='69.583333' where id=36234;
+update datasetfieldvalue set value='19.216667' where id=36244;
+update datasetfieldvalue set value='69.583333' where id=36266;
+update datasetfieldvalue set value='19.216667' where id=36271;
+update datasetfieldvalue set value='19.216667' where id=36287;
+update datasetfieldvalue set value='69.583333' where id=36289;
+update datasetfieldvalue set value='69.583333' where id=36318;
+update datasetfieldvalue set value='19.216667' where id=36323;
+update datasetfieldvalue set value='19.216667' where id=36326;
+update datasetfieldvalue set value='69.583333' where id=36333;
+update datasetfieldvalue set value='19.216667' where id=36346;
+update datasetfieldvalue set value='69.583333' where id=36347;
+update datasetfieldvalue set value='69.583333' where id=36352;
+update datasetfieldvalue set value='19.216667' where id=36368;
+update datasetfieldvalue set value='69.583333' where id=36408;
+update datasetfieldvalue set value='19.216667' where id=36426;
+update datasetfieldvalue set value='69.583333' where id=36445;
+update datasetfieldvalue set value='19.216667' where id=36446;
+update datasetfieldvalue set value='69.583333' where id=36455;
+update datasetfieldvalue set value='19.216667' where id=36464;
+update datasetfieldvalue set value='69.583333' where id=36470;
+update datasetfieldvalue set value='19.216667' where id=36472;
+update datasetfieldvalue set value='19.216667' where id=36505;
+update datasetfieldvalue set value='19.216667' where id=36506;
+update datasetfieldvalue set value='69.583333' where id=36522;
+update datasetfieldvalue set value='69.583333' where id=36523;
+update datasetfieldvalue set value='19.216667' where id=36533;
+update datasetfieldvalue set value='19.216667' where id=36537;
+update datasetfieldvalue set value='69.583333' where id=36543;
+update datasetfieldvalue set value='69.583333' where id=36558;
+update datasetfieldvalue set value='69.583333' where id=36576;
+update datasetfieldvalue set value='19.216667' where id=36580;
+update datasetfieldvalue set value='19.216667' where id=36589;
+update datasetfieldvalue set value='69.583333' where id=36602;
+update datasetfieldvalue set value='69.583333' where id=36631;
+update datasetfieldvalue set value='19.216667' where id=36648;
+update datasetfieldvalue set value='19.216667' where id=36656;
+update datasetfieldvalue set value='69.583333' where id=36658;
+update datasetfieldvalue set value='19.216667' where id=36662;
+update datasetfieldvalue set value='19.216667' where id=36665;
+update datasetfieldvalue set value='69.583333' where id=36672;
+update datasetfieldvalue set value='69.583333' where id=36698;
+update datasetfieldvalue set value='69.583333' where id=36703;
+update datasetfieldvalue set value='19.216667' where id=36708;
+update datasetfieldvalue set value='19.216667' where id=36711;
+update datasetfieldvalue set value='69.583333' where id=36736;
+update datasetfieldvalue set value='69.583333' where id=36744;
+update datasetfieldvalue set value='19.216667' where id=36745;
+update datasetfieldvalue set value='69.583333' where id=36776;
+update datasetfieldvalue set value='19.216667' where id=36779;
+update datasetfieldvalue set value='69.583333' where id=36791;
+update datasetfieldvalue set value='19.216667' where id=36793;
+update datasetfieldvalue set value='69.583333' where id=36808;
+update datasetfieldvalue set value='19.216667' where id=36809;
+update datasetfieldvalue set value='69.583333' where id=36837;
+update datasetfieldvalue set value='19.216667' where id=36841;
+update datasetfieldvalue set value='69.583333' where id=36852;
+update datasetfieldvalue set value='19.216667' where id=36865;
+update datasetfieldvalue set value='19.216667' where id=36872;
+update datasetfieldvalue set value='19.216667' where id=36880;
+update datasetfieldvalue set value='69.583333' where id=36901;
+update datasetfieldvalue set value='69.583333' where id=36905;
+update datasetfieldvalue set value='69.583333' where id=36924;
+update datasetfieldvalue set value='19.216667' where id=36929;
+update datasetfieldvalue set value='69.583333' where id=36934;
+update datasetfieldvalue set value='19.216667' where id=36950;
+update datasetfieldvalue set value='19.216667' where id=36959;
+update datasetfieldvalue set value='69.583333' where id=36973;
+update datasetfieldvalue set value='19.216667' where id=36985;
+update datasetfieldvalue set value='69.583333' where id=36994;
+update datasetfieldvalue set value='19.216667' where id=36999;
+update datasetfieldvalue set value='19.216667' where id=37006;
+update datasetfieldvalue set value='69.583333' where id=37012;
+update datasetfieldvalue set value='69.583333' where id=37018;
+update datasetfieldvalue set value='69.583333' where id=37039;
+update datasetfieldvalue set value='19.216667' where id=37053;
+update datasetfieldvalue set value='19.216667' where id=37059;
+update datasetfieldvalue set value='69.583333' where id=37075;
+update datasetfieldvalue set value='69.583333' where id=37079;
+update datasetfieldvalue set value='19.216667' where id=37084;
+update datasetfieldvalue set value='19.216667' where id=37086;
+update datasetfieldvalue set value='69.583333' where id=37120;
+update datasetfieldvalue set value='-2.0' where id=37232;
+update datasetfieldvalue set value='-79.0' where id=37234;
+update datasetfieldvalue set value='-82.0' where id=37242;
+update datasetfieldvalue set value='-4.0' where id=37252;
+update datasetfieldvalue set value='69.583333' where id=38269;
+update datasetfieldvalue set value='69.583333' where id=38277;
+update datasetfieldvalue set value='19.216667' where id=38289;
+update datasetfieldvalue set value='19.216667' where id=38291;
+update datasetfieldvalue set value='19.216667' where id=38305;
+update datasetfieldvalue set value='69.583333' where id=38320;
+update datasetfieldvalue set value='19.216667' where id=38335;
+update datasetfieldvalue set value='69.583333' where id=38344;
+update datasetfieldvalue set value='4.9' where id=38654;
+update datasetfieldvalue set value='31.1' where id=38657;
+update datasetfieldvalue set value='58.0' where id=38660;
+update datasetfieldvalue set value='71.1' where id=38663;
+update datasetfieldvalue set value='4.9' where id=38666;
+update datasetfieldvalue set value='71.1' where id=38672;
+update datasetfieldvalue set value='58.0' where id=38691;
+update datasetfieldvalue set value='31.1' where id=38695;
+update datasetfieldvalue set value='73.3' where id=42813;
+update datasetfieldvalue set value='7.05' where id=42815;
+update datasetfieldvalue set value='73.4' where id=42816;
+update datasetfieldvalue set value='8.25' where id=42818;
+update datasetfieldvalue set value='7.05' where id=43296;
+update datasetfieldvalue set value='73.4' where id=43322;
+update datasetfieldvalue set value='8.25' where id=43333;
+update datasetfieldvalue set value='73.3' where id=43339;
+update datasetfieldvalue set value='58.0' where id=45387;
+update datasetfieldvalue set value='18.0' where id=45391;
+update datasetfieldvalue set value='11.0' where id=45394;
+update datasetfieldvalue set value='62.0' where id=45395;
+update datasetfieldvalue set value='18.0' where id=45467;
+update datasetfieldvalue set value='58.0' where id=45470;
+update datasetfieldvalue set value='11.0' where id=45471;
+update datasetfieldvalue set value='62.0' where id=45487;
+update datasetfieldvalue set value='73.3' where id=46004;
+update datasetfieldvalue set value='7.05' where id=46021;
+update datasetfieldvalue set value='73.4' where id=46023;
+update datasetfieldvalue set value='8.25' where id=46039;
+update datasetfieldvalue set value='38.93792' where id=47525;
+update datasetfieldvalue set value='110.13892' where id=47531;
+update datasetfieldvalue set value='69.583333' where id=56318;
+update datasetfieldvalue set value='19.216667' where id=56331;
+update datasetfieldvalue set value='69.583333' where id=56339;
+update datasetfieldvalue set value='19.216667' where id=56340;
+update datasetfieldvalue set value='5.5' where id=56853;
+update datasetfieldvalue set value='78.5' where id=56861;
+update datasetfieldvalue set value='78.3' where id=56869;
+update datasetfieldvalue set value='60.630' where id=58455;
+update datasetfieldvalue set value='7.596' where id=58456;
+update datasetfieldvalue set value='7.694' where id=58461;
+update datasetfieldvalue set value='60.545' where id=58462;
+update datasetfieldvalue set value='7.694' where id=60082;
+update datasetfieldvalue set value='60.545' where id=60083;
+update datasetfieldvalue set value='7.596' where id=60097;
+update datasetfieldvalue set value='60.630' where id=60098;
+update datasetfieldvalue set value='158' where id=60706;
+update datasetfieldvalue set value='74' where id=60707;
+update datasetfieldvalue set value='31.2' where id=60779;
+update datasetfieldvalue set value='71.2' where id=60798;
+update datasetfieldvalue set value='4.5' where id=60810;
+update datasetfieldvalue set value='57.9' where id=60820;
+update datasetfieldvalue set value='66.027306' where id=63491;
+update datasetfieldvalue set value='16.467778' where id=63495;
+update datasetfieldvalue set value='16.45' where id=63714;
+update datasetfieldvalue set value='66.016667' where id=63715;
+update datasetfieldvalue set value='78.659556' where id=64995;
+update datasetfieldvalue set value='16.435583' where id=64997;
+update datasetfieldvalue set value='16.405389' where id=64998;
+update datasetfieldvalue set value='78.656806' where id=64999;
+update datasetfieldvalue set value='14.344' where id=75142;
+update datasetfieldvalue set value='14.866' where id=75152;
+update datasetfieldvalue set value='68.950' where id=75164;
+update datasetfieldvalue set value='69.026' where id=75165;
+update datasetfieldvalue set value='59.663056' where id=75211;
+update datasetfieldvalue set value='10.761667' where id=75227;
+update datasetfieldvalue set value='69.026' where id=75885;
+update datasetfieldvalue set value='14.344' where id=75942;
+update datasetfieldvalue set value='68.950' where id=75953;
+update datasetfieldvalue set value='14.866' where id=75961;
+update datasetfieldvalue set value='78.5' where id=76462;
+update datasetfieldvalue set value='5.5' where id=76463;
+update datasetfieldvalue set value='78.3' where id=76495;
+update datasetfieldvalue set value='76.147778' where id=76803;
+update datasetfieldvalue set value='15.824444' where id=76805;
+update datasetfieldvalue set value='76.084167' where id=76808;
+update datasetfieldvalue set value='16.148889' where id=76811;
+update datasetfieldvalue set value='60.545' where id=79802;
+update datasetfieldvalue set value='7.596' where id=79811;
+update datasetfieldvalue set value='60.630' where id=79820;
+update datasetfieldvalue set value='7.694' where id=79824;
+update datasetfieldvalue set value='76.2' where id=81254;
+update datasetfieldvalue set value='59.663056' where id=82117;
+update datasetfieldvalue set value='39.466667' where id=82119;
+update datasetfieldvalue set value='12.366667' where id=82123;
+update datasetfieldvalue set value='10.761667' where id=82202;
+update datasetfieldvalue set value='59.663056' where id=82645;
+update datasetfieldvalue set value='10.761667' where id=82646;
+update datasetfieldvalue set value='71.38' where id=82923;
+update datasetfieldvalue set value='4.09' where id=82924;
+update datasetfieldvalue set value='31.76' where id=82926;
+update datasetfieldvalue set value='57.76' where id=82927;
+update datasetfieldvalue set value='64' where id=86065;
+update datasetfieldvalue set value='62' where id=86078;
+update datasetfieldvalue set value='9' where id=86083;
+update datasetfieldvalue set value='6' where id=86084;
+update datasetfieldvalue set value='71.38' where id=87035;
+update datasetfieldvalue set value='31.76' where id=87037;
+update datasetfieldvalue set value='4.09' where id=87047;
+update datasetfieldvalue set value='57.76' where id=87064;
+update datasetfieldvalue set value='12.3' where id=87431;
+update datasetfieldvalue set value='57.5' where id=87434;
+update datasetfieldvalue set value='11.5' where id=87435;
+update datasetfieldvalue set value='57.9' where id=87436;
+update datasetfieldvalue set value='82.86' where id=87443;
+update datasetfieldvalue set value='82.92' where id=87444;
+update datasetfieldvalue set value='6.36' where id=87445;
+update datasetfieldvalue set value='6.12' where id=87446;
+update datasetfieldvalue set value='16.435583' where id=88079;
+update datasetfieldvalue set value='78.659556' where id=88091;
+update datasetfieldvalue set value='16.405389' where id=88099;
+update datasetfieldvalue set value='78.656806' where id=88108;
+update datasetfieldvalue set value='31.76' where id=88439;
+update datasetfieldvalue set value='71.38' where id=88454;
+update datasetfieldvalue set value='57.76' where id=88460;
+update datasetfieldvalue set value='4.09' where id=88478;
+update datasetfieldvalue set value='31.76' where id=88516;
+update datasetfieldvalue set value='4.09' where id=88530;
+update datasetfieldvalue set value='57.76' where id=88531;
+update datasetfieldvalue set value='71.38' where id=88533;
+update datasetfieldvalue set value='50.80' where id=89483;
+update datasetfieldvalue set value='-40' where id=89484;
+update datasetfieldvalue set value='69.52' where id=89732;
+update datasetfieldvalue set value='19.10' where id=89733;
+update datasetfieldvalue set value='69.55' where id=89740;
+update datasetfieldvalue set value='19.01' where id=89747;
+update datasetfieldvalue set value='57.76' where id=91572;
+update datasetfieldvalue set value='31.76' where id=91577;
+update datasetfieldvalue set value='4.09' where id=91590;
+update datasetfieldvalue set value='71.38' where id=91606;
+update datasetfieldvalue set value='59.663056' where id=97274;
+update datasetfieldvalue set value='10.761667' where id=97276;
+update datasetfieldvalue set value='68.82' where id=97521;
+update datasetfieldvalue set value='68.82' where id=97522;
+update datasetfieldvalue set value='16.48' where id=97523;
+update datasetfieldvalue set value='16.48' where id=97524;
+update datasetfieldvalue set value='60.20' where id=97539;
+update datasetfieldvalue set value='60.53' where id=97540;
+update datasetfieldvalue set value='5.69' where id=97541;
+update datasetfieldvalue set value='5.17' where id=97542;
+update datasetfieldvalue set value='59.663056' where id=100347;
+update datasetfieldvalue set value='10.761667' where id=100363;
+update datasetfieldvalue set value='59.0636' where id=102440;
+update datasetfieldvalue set value='9.8349' where id=102443;
+update datasetfieldvalue set value='69.583333' where id=103825;
+update datasetfieldvalue set value='19.216667' where id=103830;
+update datasetfieldvalue set value='69.583333' where id=103833;
+update datasetfieldvalue set value='19.216667' where id=103847;
+update datasetfieldvalue set value='19.216667' where id=103867;
+update datasetfieldvalue set value='69.583333' where id=103885;
+update datasetfieldvalue set value='69.583333' where id=103894;
+update datasetfieldvalue set value='19.216667' where id=103896;
+update datasetfieldvalue set value='69.583333' where id=103906;
+update datasetfieldvalue set value='69.583333' where id=103908;
+update datasetfieldvalue set value='19.216667' where id=103921;
+update datasetfieldvalue set value='19.216667' where id=103932;
+update datasetfieldvalue set value='69.583333' where id=103945;
+update datasetfieldvalue set value='19.216667' where id=103947;
+update datasetfieldvalue set value='69.583333' where id=103951;
+update datasetfieldvalue set value='19.216667' where id=103972;
+update datasetfieldvalue set value='19.216667' where id=103994;
+update datasetfieldvalue set value='69.583333' where id=104005;
+update datasetfieldvalue set value='19.216667' where id=104007;
+update datasetfieldvalue set value='69.583333' where id=104024;
+update datasetfieldvalue set value='69.583333' where id=104032;
+update datasetfieldvalue set value='69.583333' where id=104037;
+update datasetfieldvalue set value='19.216667' where id=104048;
+update datasetfieldvalue set value='19.216667' where id=104065;
+update datasetfieldvalue set value='69.583333' where id=104073;
+update datasetfieldvalue set value='19.216667' where id=104088;
+update datasetfieldvalue set value='69.583333' where id=104091;
+update datasetfieldvalue set value='19.216667' where id=104095;
+update datasetfieldvalue set value='19.216667' where id=104114;
+update datasetfieldvalue set value='69.583333' where id=104128;
+update datasetfieldvalue set value='69.583333' where id=104139;
+update datasetfieldvalue set value='19.216667' where id=104147;
+update datasetfieldvalue set value='19.216667' where id=104158;
+update datasetfieldvalue set value='69.583333' where id=104164;
+update datasetfieldvalue set value='69.583333' where id=104167;
+update datasetfieldvalue set value='19.216667' where id=104175;
+update datasetfieldvalue set value='69.583333' where id=104196;
+update datasetfieldvalue set value='19.216667' where id=104201;
+update datasetfieldvalue set value='19.216667' where id=104206;
+update datasetfieldvalue set value='69.583333' where id=104217;
+update datasetfieldvalue set value='69.583333' where id=104238;
+update datasetfieldvalue set value='69.583333' where id=104255;
+update datasetfieldvalue set value='19.216667' where id=104257;
+update datasetfieldvalue set value='19.216667' where id=104262;
+update datasetfieldvalue set value='69.583333' where id=104282;
+update datasetfieldvalue set value='19.216667' where id=104284;
+update datasetfieldvalue set value='69.583333' where id=104291;
+update datasetfieldvalue set value='19.216667' where id=104295;
+update datasetfieldvalue set value='69.583333' where id=104327;
+update datasetfieldvalue set value='19.216667' where id=104328;
+update datasetfieldvalue set value='69.583333' where id=104331;
+update datasetfieldvalue set value='19.216667' where id=104350;
+update datasetfieldvalue set value='69.583333' where id=104376;
+update datasetfieldvalue set value='69.583333' where id=104379;
+update datasetfieldvalue set value='19.216667' where id=104389;
+update datasetfieldvalue set value='19.216667' where id=104401;
+update datasetfieldvalue set value='69.583333' where id=104407;
+update datasetfieldvalue set value='19.216667' where id=104418;
+update datasetfieldvalue set value='69.583333' where id=104434;
+update datasetfieldvalue set value='19.216667' where id=104441;
+update datasetfieldvalue set value='19.216667' where id=104456;
+update datasetfieldvalue set value='19.216667' where id=104462;
+update datasetfieldvalue set value='69.583333' where id=104468;
+update datasetfieldvalue set value='69.583333' where id=104475;
+update datasetfieldvalue set value='19.216667' where id=104503;
+update datasetfieldvalue set value='69.583333' where id=104512;
+update datasetfieldvalue set value='19.216667' where id=104521;
+update datasetfieldvalue set value='69.583333' where id=104522;
+update datasetfieldvalue set value='69.583333' where id=104540;
+update datasetfieldvalue set value='19.216667' where id=104546;
+update datasetfieldvalue set value='69.583333' where id=104552;
+update datasetfieldvalue set value='19.216667' where id=104560;
+update datasetfieldvalue set value='19.216667' where id=104588;
+update datasetfieldvalue set value='69.583333' where id=104591;
+update datasetfieldvalue set value='19.216667' where id=104594;
+update datasetfieldvalue set value='69.583333' where id=104609;
+update datasetfieldvalue set value='69.583333' where id=104617;
+update datasetfieldvalue set value='19.216667' where id=104630;
+update datasetfieldvalue set value='69.583333' where id=104632;
+update datasetfieldvalue set value='19.216667' where id=104635;
+update datasetfieldvalue set value='69.583333' where id=104661;
+update datasetfieldvalue set value='19.216667' where id=104683;
+update datasetfieldvalue set value='69.583333' where id=104686;
+update datasetfieldvalue set value='19.216667' where id=104698;
+update datasetfieldvalue set value='19.216667' where id=104705;
+update datasetfieldvalue set value='69.583333' where id=104706;
+update datasetfieldvalue set value='19.216667' where id=104721;
+update datasetfieldvalue set value='69.583333' where id=104735;
+update datasetfieldvalue set value='19.216667' where id=104758;
+update datasetfieldvalue set value='69.583333' where id=104760;
+update datasetfieldvalue set value='69.583333' where id=104765;
+update datasetfieldvalue set value='19.216667' where id=104781;
+update datasetfieldvalue set value='69.583333' where id=104787;
+update datasetfieldvalue set value='19.216667' where id=104788;
+update datasetfieldvalue set value='19.216667' where id=104804;
+update datasetfieldvalue set value='69.583333' where id=104809;
+update datasetfieldvalue set value='19.216667' where id=104833;
+update datasetfieldvalue set value='69.583333' where id=104834;
+update datasetfieldvalue set value='19.216667' where id=104855;
+update datasetfieldvalue set value='69.583333' where id=104867;
+update datasetfieldvalue set value='69.583333' where id=104874;
+update datasetfieldvalue set value='19.216667' where id=104878;
+update datasetfieldvalue set value='19.216667' where id=104895;
+update datasetfieldvalue set value='69.583333' where id=104904;
+update datasetfieldvalue set value='19.216667' where id=104918;
+update datasetfieldvalue set value='19.216667' where id=104920;
+update datasetfieldvalue set value='69.583333' where id=104929;
+update datasetfieldvalue set value='69.583333' where id=104942;
+update datasetfieldvalue set value='69.583333' where id=104964;
+update datasetfieldvalue set value='19.216667' where id=104984;
+update datasetfieldvalue set value='69.583333' where id=104989;
+update datasetfieldvalue set value='19.216667' where id=104992;
+update datasetfieldvalue set value='69.583333' where id=105000;
+update datasetfieldvalue set value='69.583333' where id=105017;
+update datasetfieldvalue set value='19.216667' where id=105021;
+update datasetfieldvalue set value='19.216667' where id=105033;
+update datasetfieldvalue set value='69.583333' where id=105058;
+update datasetfieldvalue set value='69.583333' where id=105068;
+update datasetfieldvalue set value='19.216667' where id=105070;
+update datasetfieldvalue set value='19.216667' where id=105076;
+update datasetfieldvalue set value='69.583333' where id=105087;
+update datasetfieldvalue set value='19.216667' where id=105092;
+update datasetfieldvalue set value='19.216667' where id=105104;
+update datasetfieldvalue set value='69.583333' where id=105116;
+update datasetfieldvalue set value='69.583333' where id=105124;
+update datasetfieldvalue set value='19.216667' where id=105127;
+update datasetfieldvalue set value='19.216667' where id=105143;
+update datasetfieldvalue set value='69.583333' where id=105153;
+update datasetfieldvalue set value='69.583333' where id=105182;
+update datasetfieldvalue set value='19.216667' where id=105186;
+update datasetfieldvalue set value='19.216667' where id=105187;
+update datasetfieldvalue set value='69.583333' where id=105199;
+update datasetfieldvalue set value='69.583333' where id=105216;
+update datasetfieldvalue set value='19.216667' where id=105222;
+update datasetfieldvalue set value='19.216667' where id=105232;
+update datasetfieldvalue set value='69.583333' where id=105239;
+update datasetfieldvalue set value='19.216667' where id=105262;
+update datasetfieldvalue set value='69.583333' where id=105274;
+update datasetfieldvalue set value='69.583333' where id=105275;
+update datasetfieldvalue set value='19.216667' where id=105278;
+update datasetfieldvalue set value='69.583333' where id=105292;
+update datasetfieldvalue set value='19.216667' where id=105294;
+update datasetfieldvalue set value='69.583333' where id=105299;
+update datasetfieldvalue set value='19.216667' where id=105319;
+update datasetfieldvalue set value='69.583333' where id=105331;
+update datasetfieldvalue set value='19.216667' where id=105336;
+update datasetfieldvalue set value='19.216667' where id=105347;
+update datasetfieldvalue set value='69.583333' where id=105362;
+update datasetfieldvalue set value='69.583333' where id=105373;
+update datasetfieldvalue set value='19.216667' where id=105379;
+update datasetfieldvalue set value='69.583333' where id=105381;
+update datasetfieldvalue set value='19.216667' where id=105390;
+update datasetfieldvalue set value='19.216667' where id=105427;
+update datasetfieldvalue set value='19.216667' where id=105432;
+update datasetfieldvalue set value='69.583333' where id=105434;
+update datasetfieldvalue set value='69.583333' where id=105454;
+update datasetfieldvalue set value='19.216667' where id=105467;
+update datasetfieldvalue set value='69.583333' where id=105468;
+update datasetfieldvalue set value='69.583333' where id=105474;
+update datasetfieldvalue set value='19.216667' where id=105475;
+update datasetfieldvalue set value='69.583333' where id=105501;
+update datasetfieldvalue set value='69.583333' where id=105506;
+update datasetfieldvalue set value='19.216667' where id=105509;
+update datasetfieldvalue set value='19.216667' where id=105520;
+update datasetfieldvalue set value='69.583333' where id=105545;
+update datasetfieldvalue set value='19.216667' where id=105546;
+update datasetfieldvalue set value='19.216667' where id=105568;
+update datasetfieldvalue set value='69.583333' where id=105570;
+update datasetfieldvalue set value='19.216667' where id=105588;
+update datasetfieldvalue set value='69.583333' where id=105606;
+update datasetfieldvalue set value='69.583333' where id=105617;
+update datasetfieldvalue set value='19.216667' where id=105622;
+update datasetfieldvalue set value='19.216667' where id=105629;
+update datasetfieldvalue set value='69.583333' where id=105648;
+update datasetfieldvalue set value='69.583333' where id=105649;
+update datasetfieldvalue set value='19.216667' where id=105653;
+update datasetfieldvalue set value='69.583333' where id=105682;
+update datasetfieldvalue set value='69.583333' where id=105693;
+update datasetfieldvalue set value='19.216667' where id=105694;
+update datasetfieldvalue set value='19.216667' where id=105707;
+update datasetfieldvalue set value='69.583333' where id=105740;
+update datasetfieldvalue set value='19.216667' where id=105741;
+update datasetfieldvalue set value='69.583333' where id=105743;
+update datasetfieldvalue set value='19.216667' where id=105744;
+update datasetfieldvalue set value='19.216667' where id=105754;
+update datasetfieldvalue set value='69.583333' where id=105761;
+update datasetfieldvalue set value='69.583333' where id=105773;
+update datasetfieldvalue set value='19.216667' where id=105785;
+update datasetfieldvalue set value='69.583333' where id=105794;
+update datasetfieldvalue set value='19.216667' where id=105811;
+update datasetfieldvalue set value='69.583333' where id=105827;
+update datasetfieldvalue set value='19.216667' where id=105831;
+update datasetfieldvalue set value='19.216667' where id=105856;
+update datasetfieldvalue set value='69.583333' where id=105868;
+update datasetfieldvalue set value='69.583333' where id=105870;
+update datasetfieldvalue set value='19.216667' where id=105872;
+update datasetfieldvalue set value='69.583333' where id=105888;
+update datasetfieldvalue set value='19.216667' where id=105899;
+update datasetfieldvalue set value='69.583333' where id=105900;
+update datasetfieldvalue set value='19.216667' where id=105902;
+update datasetfieldvalue set value='69.583333' where id=105932;
+update datasetfieldvalue set value='19.216667' where id=105944;
+update datasetfieldvalue set value='69.583333' where id=105947;
+update datasetfieldvalue set value='19.216667' where id=105959;
+update datasetfieldvalue set value='19.216667' where id=105972;
+update datasetfieldvalue set value='69.583333' where id=105978;
+update datasetfieldvalue set value='69.583333' where id=105986;
+update datasetfieldvalue set value='19.216667' where id=106001;
+update datasetfieldvalue set value='19.216667' where id=106009;
+update datasetfieldvalue set value='69.583333' where id=106013;
+update datasetfieldvalue set value='19.216667' where id=106024;
+update datasetfieldvalue set value='69.583333' where id=106040;
+update datasetfieldvalue set value='69.583333' where id=106048;
+update datasetfieldvalue set value='19.216667' where id=106052;
+update datasetfieldvalue set value='69.583333' where id=106054;
+update datasetfieldvalue set value='19.216667' where id=106080;
+update datasetfieldvalue set value='19.216667' where id=106100;
+update datasetfieldvalue set value='69.583333' where id=106105;
+update datasetfieldvalue set value='69.583333' where id=106108;
+update datasetfieldvalue set value='19.216667' where id=106110;
+update datasetfieldvalue set value='69.583333' where id=106150;
+update datasetfieldvalue set value='19.216667' where id=106152;
+update datasetfieldvalue set value='69.583333' where id=106153;
+update datasetfieldvalue set value='19.216667' where id=106157;
+update datasetfieldvalue set value='19.216667' where id=106171;
+update datasetfieldvalue set value='69.583333' where id=106182;
+update datasetfieldvalue set value='19.216667' where id=106185;
+update datasetfieldvalue set value='69.583333' where id=106191;
+update datasetfieldvalue set value='19.216667' where id=106230;
+update datasetfieldvalue set value='69.583333' where id=106235;
+update datasetfieldvalue set value='69.583333' where id=106246;
+update datasetfieldvalue set value='19.216667' where id=106250;
+update datasetfieldvalue set value='69.583333' where id=106259;
+update datasetfieldvalue set value='19.216667' where id=106262;
+update datasetfieldvalue set value='19.216667' where id=106280;
+update datasetfieldvalue set value='69.583333' where id=106285;
+update datasetfieldvalue set value='19.216667' where id=106298;
+update datasetfieldvalue set value='69.583333' where id=106306;
+update datasetfieldvalue set value='19.216667' where id=106309;
+update datasetfieldvalue set value='69.583333' where id=106335;
+update datasetfieldvalue set value='69.583333' where id=106349;
+update datasetfieldvalue set value='69.583333' where id=106353;
+update datasetfieldvalue set value='19.216667' where id=106366;
+update datasetfieldvalue set value='19.216667' where id=106368;
+update datasetfieldvalue set value='69.583333' where id=106383;
+update datasetfieldvalue set value='19.216667' where id=106384;
+update datasetfieldvalue set value='69.583333' where id=106396;
+update datasetfieldvalue set value='19.216667' where id=106409;
+update datasetfieldvalue set value='19.216667' where id=106426;
+update datasetfieldvalue set value='19.216667' where id=106443;
+update datasetfieldvalue set value='69.583333' where id=106447;
+update datasetfieldvalue set value='69.583333' where id=106461;
+update datasetfieldvalue set value='69.583333' where id=106466;
+update datasetfieldvalue set value='19.216667' where id=106474;
+update datasetfieldvalue set value='19.216667' where id=106482;
+update datasetfieldvalue set value='69.583333' where id=106497;
+update datasetfieldvalue set value='69.583333' where id=106514;
+update datasetfieldvalue set value='19.216667' where id=106521;
+update datasetfieldvalue set value='69.583333' where id=106529;
+update datasetfieldvalue set value='19.216667' where id=106533;
+update datasetfieldvalue set value='19.216667' where id=106550;
+update datasetfieldvalue set value='69.583333' where id=106559;
+update datasetfieldvalue set value='69.583333' where id=106576;
+update datasetfieldvalue set value='19.216667' where id=106579;
+update datasetfieldvalue set value='69.583333' where id=106601;
+update datasetfieldvalue set value='19.216667' where id=106605;
+update datasetfieldvalue set value='69.583333' where id=106614;
+update datasetfieldvalue set value='19.216667' where id=106631;
+update datasetfieldvalue set value='69.583333' where id=106636;
+update datasetfieldvalue set value='19.216667' where id=106644;
+update datasetfieldvalue set value='19.216667' where id=106652;
+update datasetfieldvalue set value='69.583333' where id=106655;
+update datasetfieldvalue set value='19.216667' where id=106675;
+update datasetfieldvalue set value='69.583333' where id=106684;
+update datasetfieldvalue set value='19.216667' where id=106692;
+update datasetfieldvalue set value='69.583333' where id=106709;
+update datasetfieldvalue set value='19.216667' where id=106724;
+update datasetfieldvalue set value='19.216667' where id=106726;
+update datasetfieldvalue set value='69.583333' where id=106729;
+update datasetfieldvalue set value='69.583333' where id=106745;
+update datasetfieldvalue set value='69.583333' where id=106773;
+update datasetfieldvalue set value='69.583333' where id=106776;
+update datasetfieldvalue set value='19.216667' where id=106781;
+update datasetfieldvalue set value='19.216667' where id=106799;
+update datasetfieldvalue set value='69.583333' where id=106812;
+update datasetfieldvalue set value='69.583333' where id=106813;
+update datasetfieldvalue set value='19.216667' where id=106814;
+update datasetfieldvalue set value='19.216667' where id=106831;
+update datasetfieldvalue set value='19.216667' where id=106848;
+update datasetfieldvalue set value='69.583333' where id=106852;
+update datasetfieldvalue set value='69.583333' where id=106869;
+update datasetfieldvalue set value='19.216667' where id=106882;
+update datasetfieldvalue set value='69.583333' where id=106886;
+update datasetfieldvalue set value='19.216667' where id=106893;
+update datasetfieldvalue set value='69.583333' where id=106901;
+update datasetfieldvalue set value='19.216667' where id=106906;
+update datasetfieldvalue set value='19.216667' where id=106927;
+update datasetfieldvalue set value='69.583333' where id=106935;
+update datasetfieldvalue set value='69.583333' where id=106939;
+update datasetfieldvalue set value='19.216667' where id=106965;
+update datasetfieldvalue set value='19.216667' where id=106972;
+update datasetfieldvalue set value='69.583333' where id=106988;
+update datasetfieldvalue set value='69.583333' where id=106997;
+update datasetfieldvalue set value='19.216667' where id=107001;
+update datasetfieldvalue set value='19.216667' where id=107019;
+update datasetfieldvalue set value='19.216667' where id=107025;
+update datasetfieldvalue set value='69.583333' where id=107030;
+update datasetfieldvalue set value='69.583333' where id=107047;
+update datasetfieldvalue set value='69.583333' where id=107054;
+update datasetfieldvalue set value='19.216667' where id=107076;
+update datasetfieldvalue set value='19.216667' where id=107077;
+update datasetfieldvalue set value='69.583333' where id=107081;
+update datasetfieldvalue set value='69.583333' where id=107108;
+update datasetfieldvalue set value='19.216667' where id=107111;
+update datasetfieldvalue set value='19.216667' where id=107112;
+update datasetfieldvalue set value='69.583333' where id=107115;
+update datasetfieldvalue set value='69.583333' where id=107138;
+update datasetfieldvalue set value='19.216667' where id=107142;
+update datasetfieldvalue set value='69.583333' where id=107145;
+update datasetfieldvalue set value='19.216667' where id=107167;
+update datasetfieldvalue set value='69.583333' where id=107198;
+update datasetfieldvalue set value='19.216667' where id=107200;
+update datasetfieldvalue set value='19.216667' where id=107210;
+update datasetfieldvalue set value='69.583333' where id=107214;
+update datasetfieldvalue set value='19.216667' where id=107226;
+update datasetfieldvalue set value='19.216667' where id=107233;
+update datasetfieldvalue set value='69.583333' where id=107240;
+update datasetfieldvalue set value='69.583333' where id=107253;
+update datasetfieldvalue set value='19.216667' where id=107267;
+update datasetfieldvalue set value='69.583333' where id=107290;
+update datasetfieldvalue set value='19.216667' where id=107293;
+update datasetfieldvalue set value='69.583333' where id=107296;
+update datasetfieldvalue set value='19.216667' where id=107312;
+update datasetfieldvalue set value='69.583333' where id=107313;
+update datasetfieldvalue set value='19.216667' where id=107327;
+update datasetfieldvalue set value='69.583333' where id=107338;
+update datasetfieldvalue set value='69.583333' where id=107349;
+update datasetfieldvalue set value='19.216667' where id=107350;
+update datasetfieldvalue set value='19.216667' where id=107372;
+update datasetfieldvalue set value='69.583333' where id=107377;
+update datasetfieldvalue set value='69.583333' where id=107403;
+update datasetfieldvalue set value='19.216667' where id=107410;
+update datasetfieldvalue set value='19.216667' where id=107413;
+update datasetfieldvalue set value='69.583333' where id=107419;
+update datasetfieldvalue set value='69.583333' where id=107439;
+update datasetfieldvalue set value='69.583333' where id=107445;
+update datasetfieldvalue set value='19.216667' where id=107450;
+update datasetfieldvalue set value='19.216667' where id=107465;
+update datasetfieldvalue set value='19.216667' where id=107481;
+update datasetfieldvalue set value='19.216667' where id=107483;
+update datasetfieldvalue set value='69.583333' where id=107496;
+update datasetfieldvalue set value='69.583333' where id=107505;
+update datasetfieldvalue set value='69.583333' where id=107518;
+update datasetfieldvalue set value='19.216667' where id=107523;
+update datasetfieldvalue set value='69.583333' where id=107543;
+update datasetfieldvalue set value='19.216667' where id=107548;
+update datasetfieldvalue set value='19.216667' where id=107560;
+update datasetfieldvalue set value='19.216667' where id=107568;
+update datasetfieldvalue set value='69.583333' where id=107571;
+update datasetfieldvalue set value='69.583333' where id=107572;
+update datasetfieldvalue set value='19.216667' where id=107601;
+update datasetfieldvalue set value='69.583333' where id=107618;
+update datasetfieldvalue set value='69.583333' where id=107629;
+update datasetfieldvalue set value='19.216667' where id=107637;
+update datasetfieldvalue set value='69.583333' where id=107644;
+update datasetfieldvalue set value='19.216667' where id=107653;
+update datasetfieldvalue set value='69.583333' where id=107654;
+update datasetfieldvalue set value='19.216667' where id=107679;
+update datasetfieldvalue set value='69.583333' where id=107684;
+update datasetfieldvalue set value='19.216667' where id=107692;
+update datasetfieldvalue set value='19.216667' where id=107695;
+update datasetfieldvalue set value='69.583333' where id=107706;
+update datasetfieldvalue set value='19.216667' where id=107725;
+update datasetfieldvalue set value='69.583333' where id=107727;
+update datasetfieldvalue set value='19.216667' where id=107750;
+update datasetfieldvalue set value='69.583333' where id=107759;
+update datasetfieldvalue set value='69.583333' where id=107768;
+update datasetfieldvalue set value='19.216667' where id=107771;
+update datasetfieldvalue set value='69.583333' where id=107803;
+update datasetfieldvalue set value='19.216667' where id=107804;
+update datasetfieldvalue set value='69.583333' where id=107813;
+update datasetfieldvalue set value='69.583333' where id=107816;
+update datasetfieldvalue set value='19.216667' where id=107818;
+update datasetfieldvalue set value='19.216667' where id=107824;
+update datasetfieldvalue set value='69.583333' where id=107860;
+update datasetfieldvalue set value='19.216667' where id=107864;
+update datasetfieldvalue set value='19.216667' where id=107875;
+update datasetfieldvalue set value='69.583333' where id=107890;
+update datasetfieldvalue set value='69.583333' where id=107908;
+update datasetfieldvalue set value='19.216667' where id=107920;
+update datasetfieldvalue set value='19.216667' where id=107923;
+update datasetfieldvalue set value='69.583333' where id=107933;
+update datasetfieldvalue set value='69.583333' where id=107936;
+update datasetfieldvalue set value='19.216667' where id=107939;
+update datasetfieldvalue set value='19.216667' where id=107941;
+update datasetfieldvalue set value='69.583333' where id=107968;
+update datasetfieldvalue set value='19.216667' where id=107983;
+update datasetfieldvalue set value='69.583333' where id=107990;
+update datasetfieldvalue set value='69.583333' where id=108000;
+update datasetfieldvalue set value='19.216667' where id=108017;
+update datasetfieldvalue set value='19.216667' where id=108037;
+update datasetfieldvalue set value='19.216667' where id=108039;
+update datasetfieldvalue set value='69.583333' where id=108045;
+update datasetfieldvalue set value='69.583333' where id=108051;
+update datasetfieldvalue set value='19.216667' where id=108062;
+update datasetfieldvalue set value='19.216667' where id=108063;
+update datasetfieldvalue set value='69.583333' where id=108086;
+update datasetfieldvalue set value='69.583333' where id=108100;
+update datasetfieldvalue set value='69.583333' where id=108124;
+update datasetfieldvalue set value='19.216667' where id=108129;
+update datasetfieldvalue set value='19.216667' where id=108131;
+update datasetfieldvalue set value='69.583333' where id=108140;
+update datasetfieldvalue set value='19.216667' where id=108150;
+update datasetfieldvalue set value='69.583333' where id=108152;
+update datasetfieldvalue set value='19.216667' where id=108175;
+update datasetfieldvalue set value='69.583333' where id=108185;
+update datasetfieldvalue set value='69.583333' where id=108186;
+update datasetfieldvalue set value='19.216667' where id=108190;
+update datasetfieldvalue set value='69.583333' where id=108196;
+update datasetfieldvalue set value='19.216667' where id=108223;
+update datasetfieldvalue set value='19.216667' where id=108238;
+update datasetfieldvalue set value='69.583333' where id=108242;
+update datasetfieldvalue set value='69.583333' where id=108258;
+update datasetfieldvalue set value='19.216667' where id=108267;
+update datasetfieldvalue set value='69.583333' where id=108274;
+update datasetfieldvalue set value='19.216667' where id=108285;
+update datasetfieldvalue set value='19.216667' where id=108303;
+update datasetfieldvalue set value='69.583333' where id=108304;
+update datasetfieldvalue set value='19.216667' where id=108326;
+update datasetfieldvalue set value='69.583333' where id=108329;
+update datasetfieldvalue set value='69.583333' where id=108330;
+update datasetfieldvalue set value='19.216667' where id=108336;
+update datasetfieldvalue set value='19.216667' where id=108367;
+update datasetfieldvalue set value='69.583333' where id=108378;
+update datasetfieldvalue set value='69.583333' where id=108383;
+update datasetfieldvalue set value='19.216667' where id=108395;
+update datasetfieldvalue set value='69.583333' where id=108405;
+update datasetfieldvalue set value='69.583333' where id=108410;
+update datasetfieldvalue set value='19.216667' where id=108425;
+update datasetfieldvalue set value='19.216667' where id=108429;
+update datasetfieldvalue set value='19.216667' where id=108440;
+update datasetfieldvalue set value='69.583333' where id=108443;
+update datasetfieldvalue set value='19.216667' where id=108452;
+update datasetfieldvalue set value='69.583333' where id=108468;
+update datasetfieldvalue set value='19.216667' where id=108482;
+update datasetfieldvalue set value='19.216667' where id=108487;
+update datasetfieldvalue set value='69.583333' where id=108495;
+update datasetfieldvalue set value='69.583333' where id=108520;
+update datasetfieldvalue set value='19.216667' where id=108523;
+update datasetfieldvalue set value='69.583333' where id=108536;
+update datasetfieldvalue set value='19.216667' where id=108549;
+update datasetfieldvalue set value='69.583333' where id=108558;
+update datasetfieldvalue set value='69.583333' where id=108572;
+update datasetfieldvalue set value='19.216667' where id=108587;
+update datasetfieldvalue set value='19.216667' where id=108594;
+update datasetfieldvalue set value='69.583333' where id=108602;
+update datasetfieldvalue set value='19.216667' where id=108614;
+update datasetfieldvalue set value='69.583333' where id=108636;
+update datasetfieldvalue set value='19.216667' where id=108639;
+update datasetfieldvalue set value='69.583333' where id=108645;
+update datasetfieldvalue set value='19.216667' where id=108653;
+update datasetfieldvalue set value='69.583333' where id=108654;
+update datasetfieldvalue set value='69.583333' where id=108672;
+update datasetfieldvalue set value='19.216667' where id=108674;
+update datasetfieldvalue set value='69.583333' where id=108691;
+update datasetfieldvalue set value='19.216667' where id=108712;
+update datasetfieldvalue set value='69.583333' where id=108717;
+update datasetfieldvalue set value='19.216667' where id=108723;
+update datasetfieldvalue set value='69.583333' where id=108736;
+update datasetfieldvalue set value='19.216667' where id=108743;
+update datasetfieldvalue set value='69.583333' where id=108752;
+update datasetfieldvalue set value='19.216667' where id=108769;
+update datasetfieldvalue set value='19.216667' where id=108805;
+update datasetfieldvalue set value='19.216667' where id=108811;
+update datasetfieldvalue set value='69.583333' where id=108812;
+update datasetfieldvalue set value='69.583333' where id=108815;
+update datasetfieldvalue set value='69.583333' where id=108826;
+update datasetfieldvalue set value='19.216667' where id=108829;
+update datasetfieldvalue set value='69.583333' where id=108841;
+update datasetfieldvalue set value='19.216667' where id=108842;
+update datasetfieldvalue set value='19.216667' where id=108875;
+update datasetfieldvalue set value='69.583333' where id=108877;
+update datasetfieldvalue set value='19.216667' where id=108878;
+update datasetfieldvalue set value='69.583333' where id=108883;
+update datasetfieldvalue set value='69.583333' where id=108902;
+update datasetfieldvalue set value='19.216667' where id=108903;
+update datasetfieldvalue set value='69.583333' where id=108908;
+update datasetfieldvalue set value='19.216667' where id=108913;
+update datasetfieldvalue set value='19.216667' where id=108942;
+update datasetfieldvalue set value='69.583333' where id=108947;
+update datasetfieldvalue set value='69.583333' where id=108950;
+update datasetfieldvalue set value='19.216667' where id=108982;
+update datasetfieldvalue set value='69.583333' where id=109005;
+update datasetfieldvalue set value='19.216667' where id=109011;
+update datasetfieldvalue set value='19.216667' where id=109012;
+update datasetfieldvalue set value='69.583333' where id=109014;
+update datasetfieldvalue set value='69.583333' where id=109032;
+update datasetfieldvalue set value='69.583333' where id=109053;
+update datasetfieldvalue set value='19.216667' where id=109055;
+update datasetfieldvalue set value='19.216667' where id=109056;
+update datasetfieldvalue set value='69.583333' where id=109079;
+update datasetfieldvalue set value='69.583333' where id=109089;
+update datasetfieldvalue set value='19.216667' where id=109098;
+update datasetfieldvalue set value='19.216667' where id=109102;
+update datasetfieldvalue set value='19.216667' where id=109112;
+update datasetfieldvalue set value='69.583333' where id=109125;
+update datasetfieldvalue set value='69.583333' where id=109128;
+update datasetfieldvalue set value='19.216667' where id=109129;
+update datasetfieldvalue set value='69.583333' where id=109166;
+update datasetfieldvalue set value='19.216667' where id=109169;
+update datasetfieldvalue set value='19.216667' where id=109174;
+update datasetfieldvalue set value='69.583333' where id=109188;
+update datasetfieldvalue set value='69.583333' where id=109202;
+update datasetfieldvalue set value='19.216667' where id=109217;
+update datasetfieldvalue set value='19.216667' where id=109220;
+update datasetfieldvalue set value='69.583333' where id=109224;
+update datasetfieldvalue set value='69.583333' where id=109248;
+update datasetfieldvalue set value='19.216667' where id=109257;
+update datasetfieldvalue set value='69.583333' where id=109258;
+update datasetfieldvalue set value='19.216667' where id=109271;
+update datasetfieldvalue set value='19.216667' where id=109294;
+update datasetfieldvalue set value='69.583333' where id=109301;
+update datasetfieldvalue set value='19.216667' where id=109315;
+update datasetfieldvalue set value='69.583333' where id=109318;
+update datasetfieldvalue set value='69.583333' where id=109325;
+update datasetfieldvalue set value='19.216667' where id=109338;
+update datasetfieldvalue set value='69.583333' where id=109354;
+update datasetfieldvalue set value='19.216667' where id=109357;
+update datasetfieldvalue set value='19.216667' where id=109371;
+update datasetfieldvalue set value='69.583333' where id=109375;
+update datasetfieldvalue set value='69.583333' where id=109380;
+update datasetfieldvalue set value='19.216667' where id=109385;
+update datasetfieldvalue set value='19.216667' where id=109407;
+update datasetfieldvalue set value='69.583333' where id=109408;
+update datasetfieldvalue set value='19.216667' where id=109424;
+update datasetfieldvalue set value='69.583333' where id=109444;
+update datasetfieldvalue set value='19.216667' where id=109459;
+update datasetfieldvalue set value='69.583333' where id=109475;
+update datasetfieldvalue set value='19.216667' where id=109482;
+update datasetfieldvalue set value='69.583333' where id=109486;
+update datasetfieldvalue set value='69.583333' where id=109488;
+update datasetfieldvalue set value='19.216667' where id=109493;
+update datasetfieldvalue set value='19.216667' where id=109512;
+update datasetfieldvalue set value='69.583333' where id=109515;
+update datasetfieldvalue set value='19.216667' where id=109534;
+update datasetfieldvalue set value='19.216667' where id=109544;
+update datasetfieldvalue set value='69.583333' where id=109555;
+update datasetfieldvalue set value='69.583333' where id=109558;
+update datasetfieldvalue set value='69.583333' where id=109579;
+update datasetfieldvalue set value='69.583333' where id=109588;
+update datasetfieldvalue set value='19.216667' where id=109600;
+update datasetfieldvalue set value='19.216667' where id=109613;
+update datasetfieldvalue set value='19.216667' where id=109625;
+update datasetfieldvalue set value='19.216667' where id=109634;
+update datasetfieldvalue set value='69.583333' where id=109638;
+update datasetfieldvalue set value='69.583333' where id=109654;
+update datasetfieldvalue set value='69.583333' where id=109669;
+update datasetfieldvalue set value='19.216667' where id=109675;
+update datasetfieldvalue set value='19.216667' where id=109682;
+update datasetfieldvalue set value='69.583333' where id=109697;
+update datasetfieldvalue set value='19.216667' where id=109709;
+update datasetfieldvalue set value='69.583333' where id=109728;
+update datasetfieldvalue set value='19.216667' where id=109729;
+update datasetfieldvalue set value='69.583333' where id=109734;
+update datasetfieldvalue set value='69.583333' where id=109741;
+update datasetfieldvalue set value='19.216667' where id=109750;
+update datasetfieldvalue set value='69.583333' where id=109763;
+update datasetfieldvalue set value='19.216667' where id=109779;
+update datasetfieldvalue set value='19.216667' where id=109789;
+update datasetfieldvalue set value='69.583333' where id=109795;
+update datasetfieldvalue set value='69.583333' where id=109807;
+update datasetfieldvalue set value='19.216667' where id=109818;
+update datasetfieldvalue set value='69.583333' where id=109831;
+update datasetfieldvalue set value='69.583333' where id=109845;
+update datasetfieldvalue set value='19.216667' where id=109852;
+update datasetfieldvalue set value='19.216667' where id=109861;
+update datasetfieldvalue set value='69.583333' where id=109867;
+update datasetfieldvalue set value='19.216667' where id=109871;
+update datasetfieldvalue set value='69.583333' where id=109873;
+update datasetfieldvalue set value='19.216667' where id=109883;
+update datasetfieldvalue set value='19.216667' where id=109923;
+update datasetfieldvalue set value='69.583333' where id=109932;
+update datasetfieldvalue set value='69.583333' where id=109933;
+update datasetfieldvalue set value='19.216667' where id=109937;
+update datasetfieldvalue set value='69.583333' where id=109953;
+update datasetfieldvalue set value='19.216667' where id=109955;
+update datasetfieldvalue set value='69.583333' where id=109957;
+update datasetfieldvalue set value='19.216667' where id=109988;
+update datasetfieldvalue set value='19.216667' where id=109994;
+update datasetfieldvalue set value='69.583333' where id=109995;
+update datasetfieldvalue set value='69.583333' where id=110020;
+update datasetfieldvalue set value='19.216667' where id=110031;
+update datasetfieldvalue set value='19.216667' where id=110034;
+update datasetfieldvalue set value='19.216667' where id=110044;
+update datasetfieldvalue set value='69.583333' where id=110053;
+update datasetfieldvalue set value='69.583333' where id=110074;
+update datasetfieldvalue set value='19.216667' where id=110079;
+update datasetfieldvalue set value='19.216667' where id=110092;
+update datasetfieldvalue set value='69.583333' where id=110105;
+update datasetfieldvalue set value='69.583333' where id=110115;
+update datasetfieldvalue set value='69.583333' where id=110129;
+update datasetfieldvalue set value='69.583333' where id=110137;
+update datasetfieldvalue set value='19.216667' where id=110152;
+update datasetfieldvalue set value='19.216667' where id=110157;
+update datasetfieldvalue set value='19.216667' where id=110170;
+update datasetfieldvalue set value='69.583333' where id=110181;
+update datasetfieldvalue set value='69.583333' where id=110182;
+update datasetfieldvalue set value='19.216667' where id=110201;
+update datasetfieldvalue set value='69.583333' where id=110209;
+update datasetfieldvalue set value='19.216667' where id=110213;
+update datasetfieldvalue set value='69.583333' where id=110219;
+update datasetfieldvalue set value='19.216667' where id=110226;
+update datasetfieldvalue set value='69.583333' where id=110270;
+update datasetfieldvalue set value='19.216667' where id=110271;
+update datasetfieldvalue set value='19.216667' where id=110275;
+update datasetfieldvalue set value='69.583333' where id=110281;
+update datasetfieldvalue set value='69.583333' where id=110304;
+update datasetfieldvalue set value='19.216667' where id=110308;
+update datasetfieldvalue set value='19.216667' where id=110311;
+update datasetfieldvalue set value='69.583333' where id=110316;
+update datasetfieldvalue set value='69.583333' where id=110330;
+update datasetfieldvalue set value='19.216667' where id=110356;
+update datasetfieldvalue set value='69.583333' where id=110363;
+update datasetfieldvalue set value='19.216667' where id=110365;
+update datasetfieldvalue set value='69.583333' where id=110378;
+update datasetfieldvalue set value='19.216667' where id=110388;
+update datasetfieldvalue set value='69.583333' where id=110392;
+update datasetfieldvalue set value='19.216667' where id=110410;
+update datasetfieldvalue set value='19.216667' where id=110421;
+update datasetfieldvalue set value='69.583333' where id=110441;
+update datasetfieldvalue set value='19.216667' where id=110447;
+update datasetfieldvalue set value='69.583333' where id=110449;
+update datasetfieldvalue set value='19.216667' where id=110460;
+update datasetfieldvalue set value='69.583333' where id=110463;
+update datasetfieldvalue set value='19.216667' where id=110472;
+update datasetfieldvalue set value='69.583333' where id=110485;
+update datasetfieldvalue set value='19.216667' where id=110498;
+update datasetfieldvalue set value='69.583333' where id=110499;
+update datasetfieldvalue set value='19.216667' where id=110519;
+update datasetfieldvalue set value='69.583333' where id=110528;
+update datasetfieldvalue set value='69.583333' where id=110553;
+update datasetfieldvalue set value='19.216667' where id=110558;
+update datasetfieldvalue set value='69.583333' where id=110563;
+update datasetfieldvalue set value='19.216667' where id=110572;
+update datasetfieldvalue set value='69.583333' where id=110588;
+update datasetfieldvalue set value='19.216667' where id=110600;
+update datasetfieldvalue set value='19.216667' where id=110601;
+update datasetfieldvalue set value='69.583333' where id=110619;
+update datasetfieldvalue set value='19.216667' where id=110652;
+update datasetfieldvalue set value='19.216667' where id=110653;
+update datasetfieldvalue set value='69.583333' where id=110655;
+update datasetfieldvalue set value='69.583333' where id=110661;
+update datasetfieldvalue set value='19.216667' where id=110672;
+update datasetfieldvalue set value='69.583333' where id=110681;
+update datasetfieldvalue set value='69.583333' where id=110687;
+update datasetfieldvalue set value='19.216667' where id=110704;
+update datasetfieldvalue set value='69.583333' where id=110716;
+update datasetfieldvalue set value='19.216667' where id=110721;
+update datasetfieldvalue set value='19.216667' where id=110737;
+update datasetfieldvalue set value='69.583333' where id=110746;
+update datasetfieldvalue set value='19.216667' where id=110752;
+update datasetfieldvalue set value='19.216667' where id=110764;
+update datasetfieldvalue set value='69.583333' where id=110772;
+update datasetfieldvalue set value='69.583333' where id=110788;
+update datasetfieldvalue set value='19.216667' where id=110807;
+update datasetfieldvalue set value='19.216667' where id=110811;
+update datasetfieldvalue set value='69.583333' where id=110820;
+update datasetfieldvalue set value='69.583333' where id=110828;
+update datasetfieldvalue set value='69.583333' where id=110833;
+update datasetfieldvalue set value='19.216667' where id=110844;
+update datasetfieldvalue set value='19.216667' where id=110851;
+update datasetfieldvalue set value='69.583333' where id=110853;
+update datasetfieldvalue set value='69.583333' where id=110878;
+update datasetfieldvalue set value='19.216667' where id=110880;
+update datasetfieldvalue set value='69.583333' where id=110904;
+update datasetfieldvalue set value='19.216667' where id=110913;
+update datasetfieldvalue set value='69.583333' where id=110920;
+update datasetfieldvalue set value='19.216667' where id=110924;
+update datasetfieldvalue set value='19.216667' where id=110935;
+update datasetfieldvalue set value='69.583333' where id=110949;
+update datasetfieldvalue set value='19.216667' where id=110965;
+update datasetfieldvalue set value='69.583333' where id=110966;
+update datasetfieldvalue set value='69.583333' where id=110967;
+update datasetfieldvalue set value='19.216667' where id=110999;
+update datasetfieldvalue set value='69.583333' where id=111010;
+update datasetfieldvalue set value='19.216667' where id=111034;
+update datasetfieldvalue set value='69.583333' where id=111040;
+update datasetfieldvalue set value='19.216667' where id=111041;
+update datasetfieldvalue set value='69.583333' where id=111052;
+update datasetfieldvalue set value='19.216667' where id=111061;
+update datasetfieldvalue set value='19.216667' where id=111063;
+update datasetfieldvalue set value='69.583333' where id=111075;
+update datasetfieldvalue set value='69.583333' where id=111098;
+update datasetfieldvalue set value='69.583333' where id=111101;
+update datasetfieldvalue set value='19.216667' where id=111105;
+update datasetfieldvalue set value='19.216667' where id=111123;
+update datasetfieldvalue set value='69.583333' where id=111128;
+update datasetfieldvalue set value='19.216667' where id=111133;
+update datasetfieldvalue set value='19.216667' where id=111145;
+update datasetfieldvalue set value='69.583333' where id=111156;
+update datasetfieldvalue set value='19.216667' where id=111171;
+update datasetfieldvalue set value='69.583333' where id=111185;
+update datasetfieldvalue set value='69.583333' where id=111193;
+update datasetfieldvalue set value='19.216667' where id=111207;
+update datasetfieldvalue set value='19.216667' where id=111226;
+update datasetfieldvalue set value='69.583333' where id=111237;
+update datasetfieldvalue set value='19.216667' where id=111245;
+update datasetfieldvalue set value='69.583333' where id=111249;
+update datasetfieldvalue set value='19.216667' where id=111255;
+update datasetfieldvalue set value='69.583333' where id=111274;
+update datasetfieldvalue set value='69.583333' where id=111275;
+update datasetfieldvalue set value='19.216667' where id=111277;
+update datasetfieldvalue set value='69.583333' where id=111302;
+update datasetfieldvalue set value='19.216667' where id=111310;
+update datasetfieldvalue set value='69.583333' where id=111316;
+update datasetfieldvalue set value='19.216667' where id=111317;
+update datasetfieldvalue set value='69.583333' where id=111336;
+update datasetfieldvalue set value='69.583333' where id=111341;
+update datasetfieldvalue set value='19.216667' where id=111364;
+update datasetfieldvalue set value='19.216667' where id=111376;
+update datasetfieldvalue set value='69.583333' where id=111379;
+update datasetfieldvalue set value='19.216667' where id=111407;
+update datasetfieldvalue set value='19.216667' where id=111412;
+update datasetfieldvalue set value='69.583333' where id=111415;
+update datasetfieldvalue set value='19.216667' where id=111432;
+update datasetfieldvalue set value='69.583333' where id=111452;
+update datasetfieldvalue set value='69.583333' where id=111458;
+update datasetfieldvalue set value='19.216667' where id=111460;
+update datasetfieldvalue set value='69.583333' where id=111463;
+update datasetfieldvalue set value='19.216667' where id=111467;
+update datasetfieldvalue set value='19.216667' where id=111473;
+update datasetfieldvalue set value='69.583333' where id=111494;
+update datasetfieldvalue set value='69.583333' where id=111516;
+update datasetfieldvalue set value='19.216667' where id=111518;
+update datasetfieldvalue set value='69.583333' where id=111519;
+update datasetfieldvalue set value='19.216667' where id=111540;
+update datasetfieldvalue set value='19.216667' where id=111546;
+update datasetfieldvalue set value='69.583333' where id=111554;
+update datasetfieldvalue set value='69.583333' where id=111563;
+update datasetfieldvalue set value='19.216667' where id=111568;
+update datasetfieldvalue set value='19.216667' where id=111594;
+update datasetfieldvalue set value='19.216667' where id=111608;
+update datasetfieldvalue set value='69.583333' where id=111617;
+update datasetfieldvalue set value='69.583333' where id=111628;
+update datasetfieldvalue set value='19.216667' where id=111638;
+update datasetfieldvalue set value='69.583333' where id=111640;
+update datasetfieldvalue set value='19.216667' where id=111660;
+update datasetfieldvalue set value='69.583333' where id=111669;
+update datasetfieldvalue set value='19.216667' where id=111686;
+update datasetfieldvalue set value='69.583333' where id=111692;
+update datasetfieldvalue set value='69.583333' where id=111706;
+update datasetfieldvalue set value='19.216667' where id=111708;
+update datasetfieldvalue set value='69.583333' where id=111717;
+update datasetfieldvalue set value='69.583333' where id=111719;
+update datasetfieldvalue set value='19.216667' where id=111723;
+update datasetfieldvalue set value='19.216667' where id=111755;
+update datasetfieldvalue set value='69.583333' where id=111758;
+update datasetfieldvalue set value='19.216667' where id=111780;
+update datasetfieldvalue set value='69.583333' where id=111785;
+update datasetfieldvalue set value='19.216667' where id=111790;
+update datasetfieldvalue set value='69.583333' where id=111805;
+update datasetfieldvalue set value='69.583333' where id=111807;
+update datasetfieldvalue set value='19.216667' where id=111833;
+update datasetfieldvalue set value='19.216667' where id=111839;
+update datasetfieldvalue set value='69.583333' where id=111851;
+update datasetfieldvalue set value='19.216667' where id=111864;
+update datasetfieldvalue set value='19.216667' where id=111869;
+update datasetfieldvalue set value='69.583333' where id=111870;
+update datasetfieldvalue set value='69.583333' where id=111884;
+update datasetfieldvalue set value='19.216667' where id=111910;
+update datasetfieldvalue set value='69.583333' where id=111918;
+update datasetfieldvalue set value='19.216667' where id=111922;
+update datasetfieldvalue set value='69.583333' where id=111932;
+update datasetfieldvalue set value='69.583333' where id=111934;
+update datasetfieldvalue set value='19.216667' where id=111943;
+update datasetfieldvalue set value='19.216667' where id=111953;
+update datasetfieldvalue set value='69.583333' where id=111973;
+update datasetfieldvalue set value='19.216667' where id=111983;
+update datasetfieldvalue set value='69.583333' where id=111992;
+update datasetfieldvalue set value='19.216667' where id=111994;
+update datasetfieldvalue set value='69.583333' where id=112026;
+update datasetfieldvalue set value='69.583333' where id=112027;
+update datasetfieldvalue set value='19.216667' where id=112034;
+update datasetfieldvalue set value='19.216667' where id=112048;
+update datasetfieldvalue set value='69.583333' where id=112054;
+update datasetfieldvalue set value='19.216667' where id=112057;
+update datasetfieldvalue set value='19.216667' where id=112081;
+update datasetfieldvalue set value='69.583333' where id=112086;
+update datasetfieldvalue set value='19.216667' where id=112107;
+update datasetfieldvalue set value='19.216667' where id=112109;
+update datasetfieldvalue set value='69.583333' where id=112118;
+update datasetfieldvalue set value='69.583333' where id=112123;
+update datasetfieldvalue set value='19.216667' where id=112145;
+update datasetfieldvalue set value='69.583333' where id=112155;
+update datasetfieldvalue set value='19.216667' where id=112163;
+update datasetfieldvalue set value='69.583333' where id=112170;
+update datasetfieldvalue set value='19.216667' where id=112179;
+update datasetfieldvalue set value='19.216667' where id=112184;
+update datasetfieldvalue set value='69.583333' where id=112201;
+update datasetfieldvalue set value='69.583333' where id=112210;
+update datasetfieldvalue set value='19.216667' where id=112224;
+update datasetfieldvalue set value='19.216667' where id=112234;
+update datasetfieldvalue set value='69.583333' where id=112247;
+update datasetfieldvalue set value='69.583333' where id=112249;
+update datasetfieldvalue set value='69.583333' where id=112269;
+update datasetfieldvalue set value='19.216667' where id=112277;
+update datasetfieldvalue set value='69.583333' where id=112280;
+update datasetfieldvalue set value='19.216667' where id=112282;
+update datasetfieldvalue set value='19.216667' where id=112308;
+update datasetfieldvalue set value='69.583333' where id=112312;
+update datasetfieldvalue set value='19.216667' where id=112333;
+update datasetfieldvalue set value='69.583333' where id=112336;
+update datasetfieldvalue set value='69.583333' where id=112345;
+update datasetfieldvalue set value='19.216667' where id=112351;
+update datasetfieldvalue set value='69.583333' where id=112362;
+update datasetfieldvalue set value='19.216667' where id=112374;
+update datasetfieldvalue set value='19.216667' where id=112386;
+update datasetfieldvalue set value='19.216667' where id=112394;
+update datasetfieldvalue set value='69.583333' where id=112408;
+update datasetfieldvalue set value='69.583333' where id=112415;
+update datasetfieldvalue set value='19.216667' where id=112447;
+update datasetfieldvalue set value='69.583333' where id=112450;
+update datasetfieldvalue set value='19.216667' where id=112463;
+update datasetfieldvalue set value='69.583333' where id=112467;
+update datasetfieldvalue set value='19.216667' where id=112476;
+update datasetfieldvalue set value='69.583333' where id=112477;
+update datasetfieldvalue set value='19.216667' where id=112481;
+update datasetfieldvalue set value='69.583333' where id=112503;
+update datasetfieldvalue set value='69.583333' where id=112525;
+update datasetfieldvalue set value='19.216667' where id=112528;
+update datasetfieldvalue set value='19.216667' where id=112531;
+update datasetfieldvalue set value='69.583333' where id=112539;
+update datasetfieldvalue set value='69.583333' where id=112554;
+update datasetfieldvalue set value='69.583333' where id=112574;
+update datasetfieldvalue set value='19.216667' where id=112588;
+update datasetfieldvalue set value='19.216667' where id=112592;
+update datasetfieldvalue set value='69.583333' where id=112609;
+update datasetfieldvalue set value='19.216667' where id=112624;
+update datasetfieldvalue set value='69.583333' where id=112625;
+update datasetfieldvalue set value='19.216667' where id=112634;
+update datasetfieldvalue set value='69.583333' where id=112659;
+update datasetfieldvalue set value='69.583333' where id=112660;
+update datasetfieldvalue set value='19.216667' where id=112666;
+update datasetfieldvalue set value='19.216667' where id=112678;
+update datasetfieldvalue set value='19.216667' where id=112690;
+update datasetfieldvalue set value='19.216667' where id=112704;
+update datasetfieldvalue set value='69.583333' where id=112705;
+update datasetfieldvalue set value='69.583333' where id=112718;
+update datasetfieldvalue set value='19.216667' where id=112724;
+update datasetfieldvalue set value='69.583333' where id=112725;
+update datasetfieldvalue set value='69.583333' where id=112757;
+update datasetfieldvalue set value='19.216667' where id=112758;
+update datasetfieldvalue set value='69.583333' where id=112766;
+update datasetfieldvalue set value='69.583333' where id=112774;
+update datasetfieldvalue set value='19.216667' where id=112796;
+update datasetfieldvalue set value='19.216667' where id=112802;
+update datasetfieldvalue set value='19.216667' where id=112808;
+update datasetfieldvalue set value='19.216667' where id=112830;
+update datasetfieldvalue set value='69.583333' where id=112833;
+update datasetfieldvalue set value='69.583333' where id=112834;
+update datasetfieldvalue set value='19.216667' where id=112848;
+update datasetfieldvalue set value='69.583333' where id=112850;
+update datasetfieldvalue set value='69.583333' where id=112868;
+update datasetfieldvalue set value='19.216667' where id=112874;
+update datasetfieldvalue set value='19.216667' where id=112897;
+update datasetfieldvalue set value='19.216667' where id=112904;
+update datasetfieldvalue set value='69.583333' where id=112911;
+update datasetfieldvalue set value='69.583333' where id=112913;
+update datasetfieldvalue set value='69.583333' where id=112950;
+update datasetfieldvalue set value='19.216667' where id=112953;
+update datasetfieldvalue set value='19.216667' where id=112966;
+update datasetfieldvalue set value='69.583333' where id=112970;
+update datasetfieldvalue set value='19.216667' where id=112980;
+update datasetfieldvalue set value='69.583333' where id=112984;
+update datasetfieldvalue set value='69.583333' where id=112991;
+update datasetfieldvalue set value='19.216667' where id=113000;
+update datasetfieldvalue set value='19.216667' where id=113027;
+update datasetfieldvalue set value='69.583333' where id=113043;
+update datasetfieldvalue set value='19.216667' where id=113046;
+update datasetfieldvalue set value='69.583333' where id=113048;
+update datasetfieldvalue set value='69.583333' where id=113062;
+update datasetfieldvalue set value='19.216667' where id=113070;
+update datasetfieldvalue set value='19.216667' where id=113095;
+update datasetfieldvalue set value='69.583333' where id=113096;
+update datasetfieldvalue set value='69.583333' where id=113112;
+update datasetfieldvalue set value='19.216667' where id=113119;
+update datasetfieldvalue set value='19.216667' where id=113126;
+update datasetfieldvalue set value='69.583333' where id=113132;
+update datasetfieldvalue set value='19.216667' where id=113145;
+update datasetfieldvalue set value='69.583333' where id=113149;
+update datasetfieldvalue set value='19.216667' where id=113177;
+update datasetfieldvalue set value='69.583333' where id=113180;
+update datasetfieldvalue set value='19.216667' where id=113184;
+update datasetfieldvalue set value='19.216667' where id=113192;
+update datasetfieldvalue set value='69.583333' where id=113195;
+update datasetfieldvalue set value='69.583333' where id=113215;
+update datasetfieldvalue set value='19.216667' where id=113245;
+update datasetfieldvalue set value='69.583333' where id=113257;
+update datasetfieldvalue set value='69.583333' where id=113261;
+update datasetfieldvalue set value='19.216667' where id=113262;
+update datasetfieldvalue set value='19.216667' where id=113269;
+update datasetfieldvalue set value='69.583333' where id=113276;
+update datasetfieldvalue set value='19.216667' where id=113293;
+update datasetfieldvalue set value='69.583333' where id=113296;
+update datasetfieldvalue set value='69.583333' where id=113315;
+update datasetfieldvalue set value='19.216667' where id=113316;
+update datasetfieldvalue set value='69.583333' where id=113322;
+update datasetfieldvalue set value='19.216667' where id=113342;
+update datasetfieldvalue set value='69.583333' where id=113354;
+update datasetfieldvalue set value='19.216667' where id=113358;
+update datasetfieldvalue set value='19.216667' where id=113363;
+update datasetfieldvalue set value='69.583333' where id=113387;
+update datasetfieldvalue set value='19.216667' where id=113396;
+update datasetfieldvalue set value='69.583333' where id=113405;
+update datasetfieldvalue set value='69.583333' where id=113425;
+update datasetfieldvalue set value='19.216667' where id=113426;
+update datasetfieldvalue set value='69.583333' where id=113439;
+update datasetfieldvalue set value='19.216667' where id=113440;
+update datasetfieldvalue set value='19.216667' where id=113454;
+update datasetfieldvalue set value='69.583333' where id=113471;
+update datasetfieldvalue set value='69.583333' where id=113479;
+update datasetfieldvalue set value='19.216667' where id=113499;
+update datasetfieldvalue set value='69.583333' where id=113508;
+update datasetfieldvalue set value='19.216667' where id=113511;
+update datasetfieldvalue set value='19.216667' where id=113521;
+update datasetfieldvalue set value='69.583333' where id=113524;
+update datasetfieldvalue set value='69.583333' where id=113536;
+update datasetfieldvalue set value='19.216667' where id=113540;
+update datasetfieldvalue set value='69.583333' where id=113562;
+update datasetfieldvalue set value='19.216667' where id=113573;
+update datasetfieldvalue set value='69.583333' where id=113592;
+update datasetfieldvalue set value='19.216667' where id=113599;
+update datasetfieldvalue set value='69.583333' where id=113607;
+update datasetfieldvalue set value='19.216667' where id=113616;
+update datasetfieldvalue set value='69.583333' where id=113618;
+update datasetfieldvalue set value='19.216667' where id=113643;
+update datasetfieldvalue set value='69.583333' where id=113646;
+update datasetfieldvalue set value='19.216667' where id=113649;
+update datasetfieldvalue set value='19.216667' where id=113661;
+update datasetfieldvalue set value='69.583333' where id=113685;
+update datasetfieldvalue set value='19.216667' where id=113710;
+update datasetfieldvalue set value='69.583333' where id=113713;
+update datasetfieldvalue set value='69.583333' where id=113714;
+update datasetfieldvalue set value='19.216667' where id=113723;
+update datasetfieldvalue set value='69.583333' where id=113732;
+update datasetfieldvalue set value='19.216667' where id=113737;
+update datasetfieldvalue set value='69.583333' where id=113739;
+update datasetfieldvalue set value='19.216667' where id=113761;
+update datasetfieldvalue set value='19.216667' where id=113777;
+update datasetfieldvalue set value='19.216667' where id=113790;
+update datasetfieldvalue set value='69.583333' where id=113794;
+update datasetfieldvalue set value='69.583333' where id=113808;
+update datasetfieldvalue set value='19.216667' where id=113817;
+update datasetfieldvalue set value='69.583333' where id=113823;
+update datasetfieldvalue set value='69.583333' where id=113828;
+update datasetfieldvalue set value='19.216667' where id=113834;
+update datasetfieldvalue set value='19.216667' where id=113867;
+update datasetfieldvalue set value='69.583333' where id=113877;
+update datasetfieldvalue set value='69.583333' where id=113890;
+update datasetfieldvalue set value='19.216667' where id=113891;
+update datasetfieldvalue set value='19.216667' where id=113908;
+update datasetfieldvalue set value='19.216667' where id=113923;
+update datasetfieldvalue set value='69.583333' where id=113930;
+update datasetfieldvalue set value='69.583333' where id=113938;
+update datasetfieldvalue set value='69.583333' where id=113941;
+update datasetfieldvalue set value='69.583333' where id=113969;
+update datasetfieldvalue set value='19.216667' where id=113974;
+update datasetfieldvalue set value='19.216667' where id=113979;
+update datasetfieldvalue set value='19.216667' where id=113999;
+update datasetfieldvalue set value='19.216667' where id=114008;
+update datasetfieldvalue set value='69.583333' where id=114014;
+update datasetfieldvalue set value='69.583333' where id=114015;
+update datasetfieldvalue set value='19.216667' where id=114034;
+update datasetfieldvalue set value='69.583333' where id=114049;
+update datasetfieldvalue set value='69.583333' where id=114053;
+update datasetfieldvalue set value='19.216667' where id=114060;
+update datasetfieldvalue set value='19.216667' where id=114075;
+update datasetfieldvalue set value='69.583333' where id=114079;
+update datasetfieldvalue set value='19.216667' where id=114090;
+update datasetfieldvalue set value='69.583333' where id=114092;
+update datasetfieldvalue set value='69.583333' where id=114110;
+update datasetfieldvalue set value='19.216667' where id=114119;
+update datasetfieldvalue set value='69.583333' where id=114123;
+update datasetfieldvalue set value='19.216667' where id=114128;
+update datasetfieldvalue set value='69.583333' where id=114156;
+update datasetfieldvalue set value='69.583333' where id=114161;
+update datasetfieldvalue set value='19.216667' where id=114166;
+update datasetfieldvalue set value='19.216667' where id=114190;
+update datasetfieldvalue set value='69.583333' where id=114194;
+update datasetfieldvalue set value='19.216667' where id=114197;
+update datasetfieldvalue set value='19.216667' where id=114222;
+update datasetfieldvalue set value='69.583333' where id=114227;
+update datasetfieldvalue set value='19.216667' where id=114234;
+update datasetfieldvalue set value='69.583333' where id=114258;
+update datasetfieldvalue set value='69.583333' where id=114260;
+update datasetfieldvalue set value='19.216667' where id=114263;
+update datasetfieldvalue set value='69.583333' where id=114288;
+update datasetfieldvalue set value='69.583333' where id=114301;
+update datasetfieldvalue set value='19.216667' where id=114303;
+update datasetfieldvalue set value='19.216667' where id=114314;
+update datasetfieldvalue set value='69.583333' where id=114320;
+update datasetfieldvalue set value='19.216667' where id=114323;
+update datasetfieldvalue set value='19.216667' where id=114330;
+update datasetfieldvalue set value='69.583333' where id=114355;
+update datasetfieldvalue set value='69.583333' where id=114372;
+update datasetfieldvalue set value='69.583333' where id=114384;
+update datasetfieldvalue set value='19.216667' where id=114386;
+update datasetfieldvalue set value='19.216667' where id=114388;
+update datasetfieldvalue set value='19.216667' where id=114406;
+update datasetfieldvalue set value='69.583333' where id=114418;
+update datasetfieldvalue set value='19.216667' where id=114423;
+update datasetfieldvalue set value='69.583333' where id=114435;
+update datasetfieldvalue set value='19.216667' where id=114454;
+update datasetfieldvalue set value='19.216667' where id=114456;
+update datasetfieldvalue set value='69.583333' where id=114473;
+update datasetfieldvalue set value='69.583333' where id=114477;
+update datasetfieldvalue set value='69.583333' where id=114507;
+update datasetfieldvalue set value='19.216667' where id=114519;
+update datasetfieldvalue set value='69.583333' where id=114520;
+update datasetfieldvalue set value='19.216667' where id=114523;
+update datasetfieldvalue set value='19.216667' where id=114540;
+update datasetfieldvalue set value='69.583333' where id=114543;
+update datasetfieldvalue set value='69.583333' where id=114559;
+update datasetfieldvalue set value='19.216667' where id=114569;
+update datasetfieldvalue set value='19.216667' where id=114575;
+update datasetfieldvalue set value='69.583333' where id=114586;
+update datasetfieldvalue set value='69.583333' where id=114595;
+update datasetfieldvalue set value='19.216667' where id=114609;
+update datasetfieldvalue set value='19.216667' where id=114625;
+update datasetfieldvalue set value='69.583333' where id=114629;
+update datasetfieldvalue set value='69.583333' where id=114637;
+update datasetfieldvalue set value='19.216667' where id=114638;
+update datasetfieldvalue set value='69.583333' where id=114662;
+update datasetfieldvalue set value='69.583333' where id=114671;
+update datasetfieldvalue set value='19.216667' where id=114673;
+update datasetfieldvalue set value='19.216667' where id=114682;
+update datasetfieldvalue set value='19.216667' where id=114701;
+update datasetfieldvalue set value='69.583333' where id=114706;
+update datasetfieldvalue set value='69.583333' where id=114725;
+update datasetfieldvalue set value='19.216667' where id=114736;
+update datasetfieldvalue set value='69.583333' where id=114740;
+update datasetfieldvalue set value='69.583333' where id=114755;
+update datasetfieldvalue set value='19.216667' where id=114761;
+update datasetfieldvalue set value='19.216667' where id=114772;
+update datasetfieldvalue set value='19.216667' where id=114789;
+update datasetfieldvalue set value='69.583333' where id=114791;
+update datasetfieldvalue set value='69.583333' where id=114795;
+update datasetfieldvalue set value='19.216667' where id=114802;
+update datasetfieldvalue set value='69.583333' where id=114823;
+update datasetfieldvalue set value='19.216667' where id=114841;
+update datasetfieldvalue set value='69.583333' where id=114843;
+update datasetfieldvalue set value='19.216667' where id=114857;
+update datasetfieldvalue set value='69.583333' where id=114891;
+update datasetfieldvalue set value='69.583333' where id=114899;
+update datasetfieldvalue set value='19.216667' where id=114901;
+update datasetfieldvalue set value='19.216667' where id=114905;
+update datasetfieldvalue set value='69.583333' where id=114926;
+update datasetfieldvalue set value='19.216667' where id=114932;
+update datasetfieldvalue set value='69.583333' where id=114937;
+update datasetfieldvalue set value='19.216667' where id=114942;
+update datasetfieldvalue set value='19.216667' where id=114949;
+update datasetfieldvalue set value='69.583333' where id=114955;
+update datasetfieldvalue set value='69.583333' where id=114967;
+update datasetfieldvalue set value='19.216667' where id=114977;
+update datasetfieldvalue set value='19.216667' where id=114991;
+update datasetfieldvalue set value='69.583333' where id=115001;
+update datasetfieldvalue set value='19.216667' where id=115009;
+update datasetfieldvalue set value='69.583333' where id=115028;
+update datasetfieldvalue set value='19.216667' where id=115034;
+update datasetfieldvalue set value='19.216667' where id=115043;
+update datasetfieldvalue set value='69.583333' where id=115047;
+update datasetfieldvalue set value='69.583333' where id=115067;
+update datasetfieldvalue set value='19.216667' where id=115075;
+update datasetfieldvalue set value='19.216667' where id=115095;
+update datasetfieldvalue set value='69.583333' where id=115099;
+update datasetfieldvalue set value='69.583333' where id=115102;
+update datasetfieldvalue set value='19.216667' where id=115120;
+update datasetfieldvalue set value='69.583333' where id=115123;
+update datasetfieldvalue set value='19.216667' where id=115137;
+update datasetfieldvalue set value='69.583333' where id=115141;
+update datasetfieldvalue set value='19.216667' where id=115162;
+update datasetfieldvalue set value='19.216667' where id=115165;
+update datasetfieldvalue set value='69.583333' where id=115174;
+update datasetfieldvalue set value='69.583333' where id=115181;
+update datasetfieldvalue set value='19.216667' where id=115204;
+update datasetfieldvalue set value='19.216667' where id=115205;
+update datasetfieldvalue set value='69.583333' where id=115206;
+update datasetfieldvalue set value='69.583333' where id=115239;
+update datasetfieldvalue set value='19.216667' where id=115251;
+update datasetfieldvalue set value='19.216667' where id=115270;
+update datasetfieldvalue set value='69.583333' where id=115272;
+update datasetfieldvalue set value='69.583333' where id=115279;
+update datasetfieldvalue set value='19.216667' where id=115287;
+update datasetfieldvalue set value='69.583333' where id=115289;
+update datasetfieldvalue set value='69.583333' where id=115291;
+update datasetfieldvalue set value='19.216667' where id=115298;
+update datasetfieldvalue set value='69.583333' where id=115326;
+update datasetfieldvalue set value='19.216667' where id=115327;
+update datasetfieldvalue set value='69.583333' where id=115348;
+update datasetfieldvalue set value='19.216667' where id=115362;
+update datasetfieldvalue set value='19.216667' where id=115386;
+update datasetfieldvalue set value='69.583333' where id=115406;
+update datasetfieldvalue set value='69.583333' where id=115407;
+update datasetfieldvalue set value='19.216667' where id=115408;
+update datasetfieldvalue set value='19.216667' where id=115410;
+update datasetfieldvalue set value='69.583333' where id=115413;
+update datasetfieldvalue set value='19.216667' where id=115417;
+update datasetfieldvalue set value='69.583333' where id=115447;
+update datasetfieldvalue set value='69.583333' where id=115452;
+update datasetfieldvalue set value='19.216667' where id=115468;
+update datasetfieldvalue set value='69.583333' where id=115474;
+update datasetfieldvalue set value='19.216667' where id=115478;
+update datasetfieldvalue set value='19.216667' where id=115498;
+update datasetfieldvalue set value='19.216667' where id=115499;
+update datasetfieldvalue set value='69.583333' where id=115529;
+update datasetfieldvalue set value='69.583333' where id=115530;
+update datasetfieldvalue set value='69.583333' where id=115538;
+update datasetfieldvalue set value='19.216667' where id=115540;
+update datasetfieldvalue set value='69.583333' where id=115546;
+update datasetfieldvalue set value='19.216667' where id=115570;
+update datasetfieldvalue set value='19.216667' where id=115579;
+update datasetfieldvalue set value='19.216667' where id=115584;
+update datasetfieldvalue set value='69.583333' where id=115600;
+update datasetfieldvalue set value='69.583333' where id=115618;
+update datasetfieldvalue set value='19.216667' where id=115623;
+update datasetfieldvalue set value='19.216667' where id=115630;
+update datasetfieldvalue set value='69.583333' where id=115655;
+update datasetfieldvalue set value='69.583333' where id=115656;
+update datasetfieldvalue set value='69.583333' where id=115682;
+update datasetfieldvalue set value='69.583333' where id=115695;
+update datasetfieldvalue set value='19.216667' where id=115699;
+update datasetfieldvalue set value='19.216667' where id=115702;
+update datasetfieldvalue set value='69.583333' where id=115705;
+update datasetfieldvalue set value='69.583333' where id=115707;
+update datasetfieldvalue set value='19.216667' where id=115727;
+update datasetfieldvalue set value='19.216667' where id=115730;
+update datasetfieldvalue set value='69.583333' where id=115747;
+update datasetfieldvalue set value='69.583333' where id=115751;
+update datasetfieldvalue set value='19.216667' where id=115764;
+update datasetfieldvalue set value='19.216667' where id=115781;
+update datasetfieldvalue set value='69.583333' where id=115791;
+update datasetfieldvalue set value='19.216667' where id=115801;
+update datasetfieldvalue set value='19.216667' where id=115822;
+update datasetfieldvalue set value='69.583333' where id=115825;
+update datasetfieldvalue set value='19.216667' where id=115830;
+update datasetfieldvalue set value='69.583333' where id=115847;
+update datasetfieldvalue set value='69.583333' where id=115849;
+update datasetfieldvalue set value='19.216667' where id=115854;
+update datasetfieldvalue set value='69.583333' where id=115878;
+update datasetfieldvalue set value='69.583333' where id=115882;
+update datasetfieldvalue set value='19.216667' where id=115894;
+update datasetfieldvalue set value='19.216667' where id=115903;
+update datasetfieldvalue set value='19.216667' where id=115933;
+update datasetfieldvalue set value='69.583333' where id=115948;
+update datasetfieldvalue set value='19.216667' where id=115953;
+update datasetfieldvalue set value='69.583333' where id=115954;
+update datasetfieldvalue set value='69.583333' where id=115958;
+update datasetfieldvalue set value='69.583333' where id=115980;
+update datasetfieldvalue set value='19.216667' where id=115981;
+update datasetfieldvalue set value='19.216667' where id=115983;
+update datasetfieldvalue set value='19.216667' where id=116000;
+update datasetfieldvalue set value='69.583333' where id=116002;
+update datasetfieldvalue set value='69.583333' where id=116004;
+update datasetfieldvalue set value='19.216667' where id=116027;
+update datasetfieldvalue set value='19.216667' where id=116052;
+update datasetfieldvalue set value='19.216667' where id=116060;
+update datasetfieldvalue set value='69.583333' where id=116069;
+update datasetfieldvalue set value='69.583333' where id=116072;
+update datasetfieldvalue set value='69.583333' where id=116084;
+update datasetfieldvalue set value='19.216667' where id=116098;
+update datasetfieldvalue set value='69.583333' where id=116111;
+update datasetfieldvalue set value='19.216667' where id=116117;
+update datasetfieldvalue set value='19.216667' where id=116138;
+update datasetfieldvalue set value='69.583333' where id=116144;
+update datasetfieldvalue set value='69.583333' where id=116162;
+update datasetfieldvalue set value='19.216667' where id=116165;
+update datasetfieldvalue set value='19.216667' where id=116168;
+update datasetfieldvalue set value='19.216667' where id=116185;
+update datasetfieldvalue set value='69.583333' where id=116188;
+update datasetfieldvalue set value='69.583333' where id=116206;
+update datasetfieldvalue set value='19.216667' where id=116215;
+update datasetfieldvalue set value='69.583333' where id=116233;
+update datasetfieldvalue set value='69.583333' where id=116235;
+update datasetfieldvalue set value='19.216667' where id=116242;
+update datasetfieldvalue set value='69.583333' where id=116278;
+update datasetfieldvalue set value='69.583333' where id=116280;
+update datasetfieldvalue set value='19.216667' where id=116286;
+update datasetfieldvalue set value='19.216667' where id=116289;
+update datasetfieldvalue set value='69.583333' where id=116297;
+update datasetfieldvalue set value='69.583333' where id=116309;
+update datasetfieldvalue set value='19.216667' where id=116312;
+update datasetfieldvalue set value='19.216667' where id=116327;
+update datasetfieldvalue set value='69.583333' where id=116337;
+update datasetfieldvalue set value='19.216667' where id=116352;
+update datasetfieldvalue set value='19.216667' where id=116370;
+update datasetfieldvalue set value='69.583333' where id=116372;
+update datasetfieldvalue set value='19.216667' where id=116391;
+update datasetfieldvalue set value='69.583333' where id=116400;
+update datasetfieldvalue set value='69.583333' where id=116407;
+update datasetfieldvalue set value='19.216667' where id=116414;
+update datasetfieldvalue set value='19.216667' where id=116422;
+update datasetfieldvalue set value='69.583333' where id=116436;
+update datasetfieldvalue set value='69.583333' where id=116448;
+update datasetfieldvalue set value='19.216667' where id=116459;
+update datasetfieldvalue set value='19.216667' where id=116460;
+update datasetfieldvalue set value='69.583333' where id=116461;
+update datasetfieldvalue set value='19.216667' where id=116464;
+update datasetfieldvalue set value='69.583333' where id=116471;
+update datasetfieldvalue set value='19.216667' where id=116506;
+update datasetfieldvalue set value='19.216667' where id=116513;
+update datasetfieldvalue set value='69.583333' where id=116521;
+update datasetfieldvalue set value='69.583333' where id=116523;
+update datasetfieldvalue set value='19.216667' where id=116561;
+update datasetfieldvalue set value='19.216667' where id=116564;
+update datasetfieldvalue set value='69.583333' where id=116565;
+update datasetfieldvalue set value='69.583333' where id=116585;
+update datasetfieldvalue set value='19.216667' where id=116593;
+update datasetfieldvalue set value='19.216667' where id=116602;
+update datasetfieldvalue set value='69.583333' where id=116619;
+update datasetfieldvalue set value='69.583333' where id=116621;
+update datasetfieldvalue set value='19.216667' where id=116651;
+update datasetfieldvalue set value='69.583333' where id=116652;
+update datasetfieldvalue set value='69.583333' where id=116658;
+update datasetfieldvalue set value='19.216667' where id=116668;
+update datasetfieldvalue set value='69.583333' where id=116680;
+update datasetfieldvalue set value='69.583333' where id=116685;
+update datasetfieldvalue set value='19.216667' where id=116701;
+update datasetfieldvalue set value='19.216667' where id=116710;
+update datasetfieldvalue set value='19.216667' where id=116717;
+update datasetfieldvalue set value='19.216667' where id=116725;
+update datasetfieldvalue set value='69.583333' where id=116734;
+update datasetfieldvalue set value='69.583333' where id=116746;
+update datasetfieldvalue set value='69.583333' where id=116761;
+update datasetfieldvalue set value='19.216667' where id=116764;
+update datasetfieldvalue set value='19.216667' where id=116791;
+update datasetfieldvalue set value='69.583333' where id=116793;
+update datasetfieldvalue set value='19.216667' where id=116809;
+update datasetfieldvalue set value='69.583333' where id=116823;
+update datasetfieldvalue set value='19.216667' where id=116824;
+update datasetfieldvalue set value='69.583333' where id=116836;
+update datasetfieldvalue set value='69.583333' where id=116864;
+update datasetfieldvalue set value='69.583333' where id=116872;
+update datasetfieldvalue set value='19.216667' where id=116875;
+update datasetfieldvalue set value='19.216667' where id=116879;
+update datasetfieldvalue set value='19.216667' where id=116880;
+update datasetfieldvalue set value='19.216667' where id=116903;
+update datasetfieldvalue set value='69.583333' where id=116908;
+update datasetfieldvalue set value='69.583333' where id=116920;
+update datasetfieldvalue set value='69.583333' where id=116926;
+update datasetfieldvalue set value='69.583333' where id=116950;
+update datasetfieldvalue set value='19.216667' where id=116956;
+update datasetfieldvalue set value='19.216667' where id=116958;
+update datasetfieldvalue set value='69.583333' where id=116969;
+update datasetfieldvalue set value='19.216667' where id=116977;
+update datasetfieldvalue set value='69.583333' where id=116992;
+update datasetfieldvalue set value='19.216667' where id=117000;
+update datasetfieldvalue set value='69.583333' where id=117012;
+update datasetfieldvalue set value='19.216667' where id=117016;
+update datasetfieldvalue set value='69.583333' where id=117030;
+update datasetfieldvalue set value='19.216667' where id=117046;
+update datasetfieldvalue set value='69.583333' where id=117052;
+update datasetfieldvalue set value='19.216667' where id=117061;
+update datasetfieldvalue set value='19.216667' where id=117080;
+update datasetfieldvalue set value='69.583333' where id=117084;
+update datasetfieldvalue set value='69.583333' where id=117093;
+update datasetfieldvalue set value='19.216667' where id=117106;
+update datasetfieldvalue set value='19.216667' where id=117109;
+update datasetfieldvalue set value='69.583333' where id=117125;
+update datasetfieldvalue set value='19.216667' where id=117136;
+update datasetfieldvalue set value='69.583333' where id=117140;
+update datasetfieldvalue set value='19.216667' where id=117151;
+update datasetfieldvalue set value='69.583333' where id=117152;
+update datasetfieldvalue set value='69.583333' where id=117191;
+update datasetfieldvalue set value='19.216667' where id=117198;
+update datasetfieldvalue set value='19.216667' where id=117200;
+update datasetfieldvalue set value='69.583333' where id=117210;
+update datasetfieldvalue set value='69.583333' where id=117216;
+update datasetfieldvalue set value='19.216667' where id=117237;
+update datasetfieldvalue set value='19.216667' where id=117246;
+update datasetfieldvalue set value='69.583333' where id=117254;
+update datasetfieldvalue set value='69.583333' where id=117264;
+update datasetfieldvalue set value='19.216667' where id=117271;
+update datasetfieldvalue set value='69.583333' where id=117278;
+update datasetfieldvalue set value='19.216667' where id=117290;
+update datasetfieldvalue set value='19.216667' where id=117302;
+update datasetfieldvalue set value='19.216667' where id=117319;
+update datasetfieldvalue set value='69.583333' where id=117331;
+update datasetfieldvalue set value='69.583333' where id=117336;
+update datasetfieldvalue set value='19.216667' where id=117351;
+update datasetfieldvalue set value='19.216667' where id=117358;
+update datasetfieldvalue set value='69.583333' where id=117374;
+update datasetfieldvalue set value='69.583333' where id=117377;
+update datasetfieldvalue set value='19.216667' where id=117401;
+update datasetfieldvalue set value='69.583333' where id=117402;
+update datasetfieldvalue set value='19.216667' where id=117417;
+update datasetfieldvalue set value='69.583333' where id=117420;
+update datasetfieldvalue set value='19.216667' where id=117456;
+update datasetfieldvalue set value='69.583333' where id=117457;
+update datasetfieldvalue set value='19.216667' where id=117463;
+update datasetfieldvalue set value='69.583333' where id=117467;
+update datasetfieldvalue set value='19.216667' where id=117470;
+update datasetfieldvalue set value='69.583333' where id=117487;
+update datasetfieldvalue set value='19.216667' where id=117488;
+update datasetfieldvalue set value='69.583333' where id=117506;
+update datasetfieldvalue set value='19.216667' where id=117514;
+update datasetfieldvalue set value='69.583333' where id=117536;
+update datasetfieldvalue set value='69.583333' where id=117540;
+update datasetfieldvalue set value='19.216667' where id=117547;
+update datasetfieldvalue set value='69.583333' where id=117553;
+update datasetfieldvalue set value='69.583333' where id=117556;
+update datasetfieldvalue set value='19.216667' where id=117575;
+update datasetfieldvalue set value='19.216667' where id=117591;
+update datasetfieldvalue set value='69.583333' where id=117597;
+update datasetfieldvalue set value='69.583333' where id=117617;
+update datasetfieldvalue set value='19.216667' where id=117619;
+update datasetfieldvalue set value='19.216667' where id=117634;
+update datasetfieldvalue set value='69.583333' where id=117648;
+update datasetfieldvalue set value='19.216667' where id=117649;
+update datasetfieldvalue set value='69.583333' where id=117650;
+update datasetfieldvalue set value='19.216667' where id=117666;
+update datasetfieldvalue set value='19.216667' where id=117695;
+update datasetfieldvalue set value='69.583333' where id=117702;
+update datasetfieldvalue set value='19.216667' where id=117711;
+update datasetfieldvalue set value='69.583333' where id=117713;
+update datasetfieldvalue set value='69.583333' where id=117723;
+update datasetfieldvalue set value='19.216667' where id=117732;
+update datasetfieldvalue set value='69.583333' where id=117734;
+update datasetfieldvalue set value='19.216667' where id=117759;
+update datasetfieldvalue set value='69.583333' where id=117773;
+update datasetfieldvalue set value='19.216667' where id=117789;
+update datasetfieldvalue set value='69.583333' where id=117792;
+update datasetfieldvalue set value='19.216667' where id=117797;
+update datasetfieldvalue set value='19.216667' where id=117812;
+update datasetfieldvalue set value='19.216667' where id=117817;
+update datasetfieldvalue set value='69.583333' where id=117831;
+update datasetfieldvalue set value='69.583333' where id=117835;
+update datasetfieldvalue set value='69.583333' where id=117855;
+update datasetfieldvalue set value='19.216667' where id=117876;
+update datasetfieldvalue set value='69.583333' where id=117879;
+update datasetfieldvalue set value='19.216667' where id=117883;
+update datasetfieldvalue set value='19.216667' where id=117891;
+update datasetfieldvalue set value='69.583333' where id=117899;
+update datasetfieldvalue set value='19.216667' where id=117914;
+update datasetfieldvalue set value='69.583333' where id=117917;
+update datasetfieldvalue set value='19.216667' where id=117935;
+update datasetfieldvalue set value='69.583333' where id=117951;
+update datasetfieldvalue set value='19.216667' where id=117953;
+update datasetfieldvalue set value='69.583333' where id=117962;
+update datasetfieldvalue set value='19.216667' where id=117977;
+update datasetfieldvalue set value='69.583333' where id=117984;
+update datasetfieldvalue set value='19.216667' where id=117990;
+update datasetfieldvalue set value='69.583333' where id=117991;
+update datasetfieldvalue set value='19.216667' where id=118016;
+update datasetfieldvalue set value='19.216667' where id=118025;
+update datasetfieldvalue set value='69.583333' where id=118038;
+update datasetfieldvalue set value='69.583333' where id=118053;
+update datasetfieldvalue set value='19.216667' where id=118063;
+update datasetfieldvalue set value='69.583333' where id=118070;
+update datasetfieldvalue set value='19.216667' where id=118075;
+update datasetfieldvalue set value='69.583333' where id=118096;
+update datasetfieldvalue set value='19.216667' where id=118102;
+update datasetfieldvalue set value='19.216667' where id=118121;
+update datasetfieldvalue set value='69.583333' where id=118129;
+update datasetfieldvalue set value='69.583333' where id=118139;
+update datasetfieldvalue set value='19.216667' where id=118141;
+update datasetfieldvalue set value='19.216667' where id=118155;
+update datasetfieldvalue set value='69.583333' where id=118161;
+update datasetfieldvalue set value='69.583333' where id=118168;
+update datasetfieldvalue set value='19.216667' where id=118189;
+update datasetfieldvalue set value='69.583333' where id=118198;
+update datasetfieldvalue set value='69.583333' where id=118201;
+update datasetfieldvalue set value='19.216667' where id=118218;
+update datasetfieldvalue set value='19.216667' where id=118231;
+update datasetfieldvalue set value='69.583333' where id=118239;
+update datasetfieldvalue set value='69.583333' where id=118241;
+update datasetfieldvalue set value='19.216667' where id=118259;
+update datasetfieldvalue set value='19.216667' where id=118266;
+update datasetfieldvalue set value='19.216667' where id=118269;
+update datasetfieldvalue set value='69.583333' where id=118276;
+update datasetfieldvalue set value='69.583333' where id=118295;
+update datasetfieldvalue set value='69.583333' where id=118308;
+update datasetfieldvalue set value='19.216667' where id=118312;
+update datasetfieldvalue set value='19.216667' where id=118340;
+update datasetfieldvalue set value='69.583333' where id=118346;
+update datasetfieldvalue set value='69.583333' where id=118352;
+update datasetfieldvalue set value='19.216667' where id=118364;
+update datasetfieldvalue set value='19.216667' where id=118367;
+update datasetfieldvalue set value='69.583333' where id=118377;
+update datasetfieldvalue set value='19.216667' where id=118410;
+update datasetfieldvalue set value='19.216667' where id=118418;
+update datasetfieldvalue set value='69.583333' where id=118421;
+update datasetfieldvalue set value='69.583333' where id=118431;
+update datasetfieldvalue set value='69.583333' where id=118440;
+update datasetfieldvalue set value='19.216667' where id=118465;
+update datasetfieldvalue set value='69.583333' where id=118466;
+update datasetfieldvalue set value='19.216667' where id=118470;
+update datasetfieldvalue set value='69.583333' where id=118490;
+update datasetfieldvalue set value='19.216667' where id=118492;
+update datasetfieldvalue set value='19.216667' where id=118504;
+update datasetfieldvalue set value='69.583333' where id=118515;
+update datasetfieldvalue set value='69.583333' where id=118519;
+update datasetfieldvalue set value='69.583333' where id=118527;
+update datasetfieldvalue set value='19.216667' where id=118534;
+update datasetfieldvalue set value='19.216667' where id=118543;
+update datasetfieldvalue set value='19.216667' where id=118562;
+update datasetfieldvalue set value='69.583333' where id=118578;
+update datasetfieldvalue set value='69.583333' where id=118582;
+update datasetfieldvalue set value='19.216667' where id=118587;
+update datasetfieldvalue set value='19.216667' where id=118624;
+update datasetfieldvalue set value='69.583333' where id=118630;
+update datasetfieldvalue set value='69.583333' where id=118637;
+update datasetfieldvalue set value='19.216667' where id=118642;
+update datasetfieldvalue set value='19.216667' where id=118661;
+update datasetfieldvalue set value='19.216667' where id=118671;
+update datasetfieldvalue set value='69.583333' where id=118677;
+update datasetfieldvalue set value='69.583333' where id=118679;
+update datasetfieldvalue set value='69.583333' where id=118691;
+update datasetfieldvalue set value='69.583333' where id=118698;
+update datasetfieldvalue set value='19.216667' where id=118699;
+update datasetfieldvalue set value='19.216667' where id=118716;
+update datasetfieldvalue set value='19.216667' where id=118729;
+update datasetfieldvalue set value='69.583333' where id=118730;
+update datasetfieldvalue set value='69.583333' where id=118733;
+update datasetfieldvalue set value='19.216667' where id=118738;
+update datasetfieldvalue set value='19.216667' where id=118771;
+update datasetfieldvalue set value='69.583333' where id=118806;
+update datasetfieldvalue set value='19.216667' where id=118807;
+update datasetfieldvalue set value='69.583333' where id=118810;
+update datasetfieldvalue set value='19.216667' where id=118812;
+update datasetfieldvalue set value='69.583333' where id=118832;
+update datasetfieldvalue set value='69.583333' where id=118838;
+update datasetfieldvalue set value='19.216667' where id=118845;
+update datasetfieldvalue set value='19.216667' where id=118856;
+update datasetfieldvalue set value='69.583333' where id=118860;
+update datasetfieldvalue set value='19.216667' where id=118861;
+update datasetfieldvalue set value='69.583333' where id=118873;
+update datasetfieldvalue set value='19.216667' where id=118896;
+update datasetfieldvalue set value='69.583333' where id=118918;
+update datasetfieldvalue set value='19.216667' where id=118923;
+update datasetfieldvalue set value='69.583333' where id=118933;
+update datasetfieldvalue set value='69.583333' where id=118962;
+update datasetfieldvalue set value='19.216667' where id=118964;
+update datasetfieldvalue set value='19.216667' where id=118975;
+update datasetfieldvalue set value='69.583333' where id=118978;
+update datasetfieldvalue set value='69.583333' where id=118988;
+update datasetfieldvalue set value='19.216667' where id=118991;
+update datasetfieldvalue set value='69.583333' where id=119011;
+update datasetfieldvalue set value='19.216667' where id=119016;
+update datasetfieldvalue set value='19.216667' where id=119030;
+update datasetfieldvalue set value='69.583333' where id=119041;
+update datasetfieldvalue set value='69.583333' where id=119043;
+update datasetfieldvalue set value='19.216667' where id=119063;
+update datasetfieldvalue set value='69.583333' where id=119073;
+update datasetfieldvalue set value='19.216667' where id=119076;
+update datasetfieldvalue set value='69.583333' where id=119078;
+update datasetfieldvalue set value='19.216667' where id=119083;
+update datasetfieldvalue set value='69.583333' where id=119112;
+update datasetfieldvalue set value='19.216667' where id=119126;
+update datasetfieldvalue set value='69.583333' where id=119133;
+update datasetfieldvalue set value='19.216667' where id=119145;
+update datasetfieldvalue set value='69.583333' where id=119156;
+update datasetfieldvalue set value='19.216667' where id=119165;
+update datasetfieldvalue set value='69.583333' where id=119170;
+update datasetfieldvalue set value='19.216667' where id=119179;
+update datasetfieldvalue set value='19.216667' where id=119203;
+update datasetfieldvalue set value='19.216667' where id=119205;
+update datasetfieldvalue set value='69.583333' where id=119211;
+update datasetfieldvalue set value='69.583333' where id=119214;
+update datasetfieldvalue set value='69.583333' where id=119232;
+update datasetfieldvalue set value='19.216667' where id=119237;
+update datasetfieldvalue set value='19.216667' where id=119240;
+update datasetfieldvalue set value='69.583333' where id=119253;
+update datasetfieldvalue set value='69.583333' where id=119283;
+update datasetfieldvalue set value='19.216667' where id=119309;
+update datasetfieldvalue set value='69.583333' where id=119310;
+update datasetfieldvalue set value='19.216667' where id=119311;
+update datasetfieldvalue set value='19.216667' where id=119323;
+update datasetfieldvalue set value='19.216667' where id=119332;
+update datasetfieldvalue set value='69.583333' where id=119334;
+update datasetfieldvalue set value='69.583333' where id=119348;
+update datasetfieldvalue set value='69.583333' where id=119363;
+update datasetfieldvalue set value='19.216667' where id=119382;
+update datasetfieldvalue set value='19.216667' where id=119393;
+update datasetfieldvalue set value='69.583333' where id=119397;
+update datasetfieldvalue set value='69.583333' where id=119416;
+update datasetfieldvalue set value='19.216667' where id=119419;
+update datasetfieldvalue set value='19.216667' where id=119421;
+update datasetfieldvalue set value='69.583333' where id=119428;
+update datasetfieldvalue set value='19.216667' where id=119458;
+update datasetfieldvalue set value='19.216667' where id=119473;
+update datasetfieldvalue set value='69.583333' where id=119476;
+update datasetfieldvalue set value='69.583333' where id=119482;
+update datasetfieldvalue set value='69.583333' where id=119494;
+update datasetfieldvalue set value='69.583333' where id=119500;
+update datasetfieldvalue set value='19.216667' where id=119514;
+update datasetfieldvalue set value='19.216667' where id=119515;
+update datasetfieldvalue set value='82.86' where id=122055;
+update datasetfieldvalue set value='6.36' where id=122072;
+update datasetfieldvalue set value='82.92' where id=122078;
+update datasetfieldvalue set value='6.12' where id=122094;
+update datasetfieldvalue set value='110.13892' where id=124620;
+update datasetfieldvalue set value='38.93792' where id=124633;
+update datasetfieldvalue set value='10.7833' where id=126264;
+update datasetfieldvalue set value='59.6667' where id=126266;
+update datasetfieldvalue set value='19.525' where id=126584;
+update datasetfieldvalue set value='75.919' where id=126585;
+update datasetfieldvalue set value='19.745' where id=126586;
+update datasetfieldvalue set value='19.175' where id=126590;
+update datasetfieldvalue set value='75.918' where id=126591;
+update datasetfieldvalue set value='76.400' where id=126596;
+update datasetfieldvalue set value='76.069' where id=126600;
+update datasetfieldvalue set value='21.861' where id=126603;
+update datasetfieldvalue set value='78.79730' where id=127757;
+update datasetfieldvalue set value='79.95' where id=129615;
+update datasetfieldvalue set value='5.45' where id=129621;
+update datasetfieldvalue set value='25.4698775' where id=131167;
+update datasetfieldvalue set value='24.640115' where id=131171;
+update datasetfieldvalue set value='69.5056022' where id=131172;
+update datasetfieldvalue set value='70.7038205' where id=131175;
+update datasetfieldvalue set value='64.00' where id=131810;
+update datasetfieldvalue set value='69.00' where id=131811;
+update datasetfieldvalue set value='78.5' where id=133216;
+update datasetfieldvalue set value='78.3' where id=133223;
+update datasetfieldvalue set value='5.5' where id=133226;
+update datasetfieldvalue set value='23.916667' where id=134095;
+update datasetfieldvalue set value='31' where id=134106;
+update datasetfieldvalue set value='69.75' where id=134118;
+update datasetfieldvalue set value='77' where id=134120;
\ No newline at end of file
From 440f1630884a6cb27913c9e321f069f9b7ab9059 Mon Sep 17 00:00:00 2001
From: Philipp Conzett
Date: Wed, 1 Mar 2023 06:12:31 +0100
Subject: [PATCH 249/354] Update _dvno_geolocation_cleaning_v5_13.sql
Short overview of changes:
VYWBAD:
Changed: set value=null (we don't know what coordinates the researcher had in mind; we'll fix this after cloud migration)
update datasetfieldvalue set value=null where id=17895;
update datasetfieldvalue set value=null where id=17896;
update datasetfieldvalue set value=null where id=17897;
update datasetfieldvalue set value=null where id=17898;
update datasetfieldvalue set value=null where id=21626;
update datasetfieldvalue set value=null where id=21636;
update datasetfieldvalue set value=null where id=21644;
update datasetfieldvalue set value=null where id=21651;
9QKUIQ:
No changes; the value 77 is kept for now; we'll fix this after cloud migration.
update datasetfieldvalue set value='23.916667' where id=134095;
update datasetfieldvalue set value='31' where id=134106;
update datasetfieldvalue set value='69.75' where id=134118;
update datasetfieldvalue set value='77' where id=134120;
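Before applying a bulk null-out like the VYWBAD change above, it can help to stage the change in a transaction so the affected rows can be reviewed first. A minimal sketch, assuming psql against the Dataverse PostgreSQL database; the id list is copied from the statements above:

-- Sketch: review the affected rows, apply the null-out inside a transaction,
-- and COMMIT only if the SELECT shows the expected geolocation values.
BEGIN;
SELECT id, value
  FROM datasetfieldvalue
 WHERE id IN (17895, 17896, 17897, 17898, 21626, 21636, 21644, 21651);
UPDATE datasetfieldvalue SET value = NULL
 WHERE id IN (17895, 17896, 17897, 17898, 21626, 21636, 21644, 21651);
-- COMMIT;   -- or ROLLBACK; if anything looks wrong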
---
.../migration/_dvno_geolocation_cleaning_v5_13.sql | 10 +++++++++-
1 file changed, 9 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
index cb9acd5..18510b7 100644
--- a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
@@ -30,6 +30,10 @@ update datasetfieldvalue set value='71.1' where id=17871;
update datasetfieldvalue set value='60.8 ' where id=17874;
update datasetfieldvalue set value='58.0 ' where id=17889;
update datasetfieldvalue set value='31.1' where id=17891;
+update datasetfieldvalue set value=null where id=17895;
+update datasetfieldvalue set value=null where id=17896;
+update datasetfieldvalue set value=null where id=17897;
+update datasetfieldvalue set value=null where id=17898;
update datasetfieldvalue set value='69.583333' where id=17930;
update datasetfieldvalue set value='19.216667' where id=17939;
update datasetfieldvalue set value='19.216667' where id=17944;
@@ -378,6 +382,10 @@ update datasetfieldvalue set value='19.216667' where id=21590;
update datasetfieldvalue set value='69.583333' where id=21606;
update datasetfieldvalue set value='19.216667' where id=21610;
update datasetfieldvalue set value='69.583333' where id=21611;
+update datasetfieldvalue set value=null where id=21626;
+update datasetfieldvalue set value=null where id=21636;
+update datasetfieldvalue set value=null where id=21644;
+update datasetfieldvalue set value=null where id=21651;
update datasetfieldvalue set value='69.583333' where id=21954;
update datasetfieldvalue set value='69.583333' where id=21962;
update datasetfieldvalue set value='19.216667' where id=21969;
@@ -3256,4 +3264,4 @@ update datasetfieldvalue set value='5.5' where id=133226;
update datasetfieldvalue set value='23.916667' where id=134095;
update datasetfieldvalue set value='31' where id=134106;
update datasetfieldvalue set value='69.75' where id=134118;
-update datasetfieldvalue set value='77' where id=134120;
\ No newline at end of file
+update datasetfieldvalue set value='77' where id=134120;
From d7e9c5164401974bab9f739a930b8e6dad08737f Mon Sep 17 00:00:00 2001
From: Philipp Conzett
Date: Sun, 5 Mar 2023 14:59:29 +0100
Subject: [PATCH 250/354] Update _dvno_geolocation_cleaning_v5_13.sql
Updated SQL queries for these datasets (DOIs):
EA5BYX | South Latitude was larger than North Latitude
HVR5AU | South Latitude was larger than North Latitude
GXMK7K | South Latitude was larger than North Latitude
UFUYIC | South Latitude was larger than North Latitude
VYWBAD | Entire row/entry needs to be deleted (search for "DELETE" in the SQL queries to verify the change)
QHUAZ2 | South Latitude was larger than North Latitude
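The "South Latitude was larger than North Latitude" cases above are the kind that can be found by comparing paired bounding-box values. A minimal sketch of such a check, assuming the standard Dataverse metadata tables (datasetfieldvalue, datasetfield, datasetfieldtype) and the field-type names southLatitude / northLatitude; the exact names depend on the installed geospatial metadata block and are an assumption here, not taken from this patch:

-- Sketch: find compound bounding-box entries whose South Latitude value
-- is numerically greater than the paired North Latitude value.
-- Table, column, and field-type names are assumed from a stock Dataverse schema.
SELECT s.id AS south_id, s.value AS south, n.id AS north_id, n.value AS north
  FROM datasetfieldvalue s
  JOIN datasetfield sf      ON sf.id = s.datasetfield_id
  JOIN datasetfieldtype st  ON st.id = sf.datasetfieldtype_id
  JOIN datasetfield nf      ON nf.parentdatasetfieldcompoundvalue_id
                                = sf.parentdatasetfieldcompoundvalue_id
  JOIN datasetfieldtype nt  ON nt.id = nf.datasetfieldtype_id
  JOIN datasetfieldvalue n  ON n.datasetfield_id = nf.id
 WHERE st.name = 'southLatitude'
   AND nt.name = 'northLatitude'
   AND s.value ~ '^\s*-?[0-9]+(\.[0-9]+)?\s*$'
   AND n.value ~ '^\s*-?[0-9]+(\.[0-9]+)?\s*$'
   AND s.value::numeric > n.value::numeric;

Rows flagged by a query like this correspond to the pairs that are swapped below (e.g. ids 42813/42816 and 64995/64999).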
---
.../_dvno_geolocation_cleaning_v5_13.sql | 34 +++++++++++--------
1 file changed, 20 insertions(+), 14 deletions(-)
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
index 18510b7..23c60fe 100644
--- a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
@@ -30,10 +30,10 @@ update datasetfieldvalue set value='71.1' where id=17871;
update datasetfieldvalue set value='60.8 ' where id=17874;
update datasetfieldvalue set value='58.0 ' where id=17889;
update datasetfieldvalue set value='31.1' where id=17891;
-update datasetfieldvalue set value=null where id=17895;
-update datasetfieldvalue set value=null where id=17896;
-update datasetfieldvalue set value=null where id=17897;
-update datasetfieldvalue set value=null where id=17898;
+DELETE FROM datasetfieldvalue WHERE id=17895;
+DELETE FROM datasetfieldvalue WHERE id=17896;
+DELETE FROM datasetfieldvalue WHERE id=17897;
+DELETE FROM datasetfieldvalue WHERE id=17898;
update datasetfieldvalue set value='69.583333' where id=17930;
update datasetfieldvalue set value='19.216667' where id=17939;
update datasetfieldvalue set value='19.216667' where id=17944;
@@ -1602,14 +1602,14 @@ update datasetfieldvalue set value='4.9' where id=38666;
update datasetfieldvalue set value='71.1' where id=38672;
update datasetfieldvalue set value='58.0' where id=38691;
update datasetfieldvalue set value='31.1' where id=38695;
-update datasetfieldvalue set value='73.3' where id=42813;
+update datasetfieldvalue set value='73.4' where id=42813;
update datasetfieldvalue set value='7.05' where id=42815;
-update datasetfieldvalue set value='73.4' where id=42816;
+update datasetfieldvalue set value='73.3' where id=42816;
update datasetfieldvalue set value='8.25' where id=42818;
update datasetfieldvalue set value='7.05' where id=43296;
-update datasetfieldvalue set value='73.4' where id=43322;
+update datasetfieldvalue set value='73.3' where id=43322;
update datasetfieldvalue set value='8.25' where id=43333;
-update datasetfieldvalue set value='73.3' where id=43339;
+update datasetfieldvalue set value='73.4' where id=43339;
update datasetfieldvalue set value='58.0' where id=45387;
update datasetfieldvalue set value='18.0' where id=45391;
update datasetfieldvalue set value='11.0' where id=45394;
@@ -1618,12 +1618,14 @@ update datasetfieldvalue set value='18.0' where id=45467;
update datasetfieldvalue set value='58.0' where id=45470;
update datasetfieldvalue set value='11.0' where id=45471;
update datasetfieldvalue set value='62.0' where id=45487;
-update datasetfieldvalue set value='73.3' where id=46004;
+update datasetfieldvalue set value='73.4' where id=46004;
update datasetfieldvalue set value='7.05' where id=46021;
-update datasetfieldvalue set value='73.4' where id=46023;
+update datasetfieldvalue set value='73.3' where id=46023;
update datasetfieldvalue set value='8.25' where id=46039;
update datasetfieldvalue set value='38.93792' where id=47525;
update datasetfieldvalue set value='110.13892' where id=47531;
+update datasetfieldvalue set value='82.0' where id=47779;
+update datasetfieldvalue set value='72.0' where id=47809;
update datasetfieldvalue set value='69.583333' where id=56318;
update datasetfieldvalue set value='19.216667' where id=56331;
update datasetfieldvalue set value='69.583333' where id=56339;
@@ -1645,14 +1647,16 @@ update datasetfieldvalue set value='31.2' where id=60779;
update datasetfieldvalue set value='71.2' where id=60798;
update datasetfieldvalue set value='4.5' where id=60810;
update datasetfieldvalue set value='57.9' where id=60820;
+update datasetfieldvalue set value='48.46494' where id=62442;
+update datasetfieldvalue set value='48.02265' where id=62445;
update datasetfieldvalue set value='66.027306' where id=63491;
update datasetfieldvalue set value='16.467778' where id=63495;
update datasetfieldvalue set value='16.45' where id=63714;
update datasetfieldvalue set value='66.016667' where id=63715;
-update datasetfieldvalue set value='78.659556' where id=64995;
+update datasetfieldvalue set value='78.656806' where id=64995;
update datasetfieldvalue set value='16.435583' where id=64997;
update datasetfieldvalue set value='16.405389' where id=64998;
-update datasetfieldvalue set value='78.656806' where id=64999;
+update datasetfieldvalue set value='78.659556' where id=64999;
update datasetfieldvalue set value='14.344' where id=75142;
update datasetfieldvalue set value='14.866' where id=75152;
update datasetfieldvalue set value='68.950' where id=75164;
@@ -1702,9 +1706,9 @@ update datasetfieldvalue set value='82.92' where id=87444;
update datasetfieldvalue set value='6.36' where id=87445;
update datasetfieldvalue set value='6.12' where id=87446;
update datasetfieldvalue set value='16.435583' where id=88079;
-update datasetfieldvalue set value='78.659556' where id=88091;
+update datasetfieldvalue set value='78.656806' where id=88091;
update datasetfieldvalue set value='16.405389' where id=88099;
-update datasetfieldvalue set value='78.656806' where id=88108;
+update datasetfieldvalue set value='78.659556' where id=88108;
update datasetfieldvalue set value='31.76' where id=88439;
update datasetfieldvalue set value='71.38' where id=88454;
update datasetfieldvalue set value='57.76' where id=88460;
@@ -3258,6 +3262,8 @@ update datasetfieldvalue set value='69.5056022' where id=131172;
update datasetfieldvalue set value='70.7038205' where id=131175;
update datasetfieldvalue set value='64.00' where id=131810;
update datasetfieldvalue set value='69.00' where id=131811;
+update datasetfieldvalue set value='60.2' where id=132959;
+update datasetfieldvalue set value='60.5' where id=132960;
update datasetfieldvalue set value='78.5' where id=133216;
update datasetfieldvalue set value='78.3' where id=133223;
update datasetfieldvalue set value='5.5' where id=133226;
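Note: the updates above rewrite individual datasetfieldvalue rows by id, so a quick spot-check after running the cleaning script helps confirm the corrections took effect. A minimal sketch, reusing the postgres container name and database credentials that appear in the other scripts in this series; the id list is illustrative only.

    # Spot-check a few of the corrected rows (ids picked from the hunks above).
    docker exec --user postgres postgres \
      psql -U dataverse dataverse -c \
      "select id, value from datasetfieldvalue where id in (17895, 42813, 64995, 132959);"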
From 5e84e301e58920c18a30a42975aaf00036fb9ba5 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 7 Mar 2023 08:19:37 +0000
Subject: [PATCH 251/354] cronjobs
---
distros/dataverse.no/init.d/cronjob/dumpdatabase.sh | 6 ++++++
distros/dataverse.no/runOnce/crontab.sh | 8 ++++++++
2 files changed, 14 insertions(+)
create mode 100755 distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
create mode 100644 distros/dataverse.no/runOnce/crontab.sh
diff --git a/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
new file mode 100755
index 0000000..f35db71
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+docker exec --user postgres postgres pg_dump -U dataverse dataverse > /mnt/dataverse.dump
+gzip -f /mnt/dataverse.dump
+cp /mnt/dataverse.dump.gz /mntblob/databaseDumps/dataverse.dump.gz`date +%Y%m%d`
+rsync -arvP --rsh=ssh /mnt/dataverse.dump.gz DVmtr-cmp01:/tmp/dataverse.dump.gz
+
diff --git a/distros/dataverse.no/runOnce/crontab.sh b/distros/dataverse.no/runOnce/crontab.sh
new file mode 100644
index 0000000..33f8355
--- /dev/null
+++ b/distros/dataverse.no/runOnce/crontab.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+mkdir /mntblob/databaseDumps/
+chown omsagent /mntblob/databaseDumps/
+usermod -aG docker omsagent
+cp -r /distib/private/.ssh /var/opt/microsoft/omsagent/run
+chown -R omsagent /var/opt/microsoft/omsagent/run/.ssh
+#0 0 * * * /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/backupData.sh" >> /var/log/bakupslogs.log
+#0 0 * * * /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/backupData.sh" >> /var/log/bakupslogs.log
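Note: the cron entries in crontab.sh are still commented out, so nothing schedules dumpdatabase.sh yet. A minimal sketch of a host crontab entry, assuming the repository is checked out under $DISTRIB/dataverse-docker as in the installation docs; the schedule and the log file path are placeholders, and $DISTRIB must be replaced by the actual checkout path since cron does not inherit shell variables.

    # Illustrative host crontab entry: dump the dataverse database nightly at 02:00.
    0 2 * * * $DISTRIB/dataverse-docker/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh >> /var/log/dumpdatabase.log 2>&1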
From 78968954bc1c08b604b31a3fb6359e97eb348c68 Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 7 Mar 2023 10:39:14 +0100
Subject: [PATCH 252/354] added SAML packet lookup plugin
---
doc/maintenance.rst | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/doc/maintenance.rst b/doc/maintenance.rst
index 0ed20ff..31c92be 100644
--- a/doc/maintenance.rst
+++ b/doc/maintenance.rst
@@ -186,3 +186,11 @@ Delete action logs older then 90 days
dataverse=# UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'S3://','file://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%S3://%');
+Resources
+----------
+
+SAML packet lookup
+
+https://addons.mozilla.org/en-US/firefox/addon/saml-tracer/
+
+
From 3167e95fb7e9987cbc613ccc4ea5b1622ab38bb1 Mon Sep 17 00:00:00 2001
From: Philipp Conzett
Date: Tue, 7 Mar 2023 10:53:05 +0100
Subject: [PATCH 253/354] Update _dvno_geolocation_cleaning_v5_13.sql
New update. Replaced DELETE queries with approx. coordinates.
---
.../migration/_dvno_geolocation_cleaning_v5_13.sql | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
index 23c60fe..9eb8d5b 100644
--- a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
@@ -30,10 +30,10 @@ update datasetfieldvalue set value='71.1' where id=17871;
update datasetfieldvalue set value='60.8 ' where id=17874;
update datasetfieldvalue set value='58.0 ' where id=17889;
update datasetfieldvalue set value='31.1' where id=17891;
-DELETE FROM datasetfieldvalue WHERE id=17895;
-DELETE FROM datasetfieldvalue WHERE id=17896;
-DELETE FROM datasetfieldvalue WHERE id=17897;
-DELETE FROM datasetfieldvalue WHERE id=17898;
+update datasetfieldvalue set value='31.05' where id=17895;
+update datasetfieldvalue set value='28.17' where id=17896;
+update datasetfieldvalue set value='69.0' where id=17897;
+update datasetfieldvalue set value='70.90' where id=17898;
update datasetfieldvalue set value='69.583333' where id=17930;
update datasetfieldvalue set value='19.216667' where id=17939;
update datasetfieldvalue set value='19.216667' where id=17944;
From 1f495da3fe904b241c1337b8c2f0cc2f7bb7766f Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 7 Mar 2023 10:29:44 +0000
Subject: [PATCH 254/354] added backup of draft
---
distros/dataverse.no/init.d/cronjob/backupfiles.sql | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/backupfiles.sql b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
index 1fd5b04..dd92981 100644
--- a/distros/dataverse.no/init.d/cronjob/backupfiles.sql
+++ b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
@@ -1,7 +1,10 @@
--select storageidentifier from dvobject where modificationtime>='2022-12-05';
-select storageidentifier, CONCAT(authority, '/',split_part(identifier, '/', 1) , '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '1 day');
+--select storageidentifier, CONCAT(authority, '/',split_part(identifier, '/', 1) , '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '1 day');
+
+
+select storageidentifier, CONCAT( get_authority(id), '/', get_identifier(id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')) from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '7 day');
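Note: backupfiles.sql only selects the storage identifier and the authority/identifier-based target path; the copy itself is not part of this patch. A minimal sketch of one way to consume the output, assuming it runs where the postgres host name resolves (e.g. inside the dataverse container, as the init.d scripts do) and that an S3-compatible CLI such as aws is available; the destination under /mntblob is a placeholder, and the bucket name is the one hard-coded in the query.

    # Illustrative only: copy every DataFile changed in the backup window to a backup location.
    export PGPASSWORD=$(cat /secrets/db/password)
    psql -U dataverse dataverse -h postgres -At -f backupfiles.sql |
    while IFS='|' read -r storageid targetpath; do
        key=${storageid#S3://2002-green-dataversenotest1:}
        aws s3 cp "s3://2002-green-dataversenotest1/${key}" "/mntblob/backup/${targetpath}"
    done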
From 0bd74cba87f93e680a1fee22be272d1ebb1bdbcc Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 7 Mar 2023 12:44:00 +0100
Subject: [PATCH 255/354] Update README.rst
---
doc/README.rst | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/doc/README.rst b/doc/README.rst
index b0bea1e..a9de6af 100644
--- a/doc/README.rst
+++ b/doc/README.rst
@@ -14,7 +14,7 @@ Requirements & Prerequisites
SSH to a working VM as the administrator and make sure that you have sudo rights to install.
-The DataverseNO Docker Container platform stands on the shoulders of many other software projects. Before running it, the following must be installed and/or configured
+The DataverseNO Docker Container platform stands on the shoulders of many other software projects. Before running it, the following must be installed and/or configured:
- Authentication integration options (Login to DataverseNO):
@@ -22,21 +22,21 @@ The DataverseNO Docker Container platform stands on the shoulders of many other
- Azure OpenID - OpenID Connect is a security-token based extension of the OAuth 2.0 authorization protocol to do single sign-on. Azure OpenID supports single sign-on and API access to Dataverse application.
- [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places. Authentication with ORCID is supported in Dataverse. Registration for the production Members API service is open to ORCID members organizations only. UiT is an organisation member and registered with ORCID.
- [eduGAIN](https://edugain.org) - The eduGAIN interfederation service connects identity federations around the world, simplifying access to content, services and resources for the global research and education community.
-- SMTP server - is used to send, and relay outgoing email between DataverseNO (sender) and receivers. We use UiT smtp server.
+- SMTP server - is used to send and relay outgoing email between DataverseNO (sender) and receivers. We use our UiT SMTP server.
- Storage
- VM server - Stores the Dataverse application, custom scripts and configuration files.
- - Cloudian S3 storage - Uses for storing dataset files. Cloudian provides exabyte-scale storage for your capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
+  - Cloudian S3 storage - Used for storing dataset files. Cloudian provides exabyte-scale storage for capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud.
- Blob storage (mounted on the VM)
- [Docker and Docker-compose](https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command line based and only starts one container at a time. Docker-compose reads configuration data from a YAML file and runs multiple containers.
-Steps needed to implementation of DataverseNO Docker Container:
+ Steps needed to implement the DataverseNO Docker Container:
- 1) Installation of Docker on a VM machine,
- 2) Creation of Dockerfile, and
- 3) Building images and development of Dockerfile.
+ 1) Installation of Docker on a VM machine,
+ 2) Creation of Dockerfile, and
+ 3) Building images and development of Dockerfile.
* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO uses git to track the changes made on files. You can create a new project/repo or a repo from an existing project if it has not been done already.
From da94d09ca5fbc96cf59b0cb410bf2c3a941444d9 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 7 Mar 2023 17:07:28 +0100
Subject: [PATCH 256/354] Update installation.rst
---
doc/installation.rst | 16 ++++++++++------
1 file changed, 10 insertions(+), 6 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index e14d120..5444fad 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -9,9 +9,9 @@ Update APT sources
This needs to be done so as to access packages from Docker repository.
-1. Log into your VM machine as a user with sudo or root privileges.
+1. Log into your VM machine as a user with sudo or root privileges.
-2. Open a terminal window.
+2. Open a terminal window; the following commands require sudo or root privileges.
3. Update package information, ensure that APT works with the https method, and that CA certificates are installed.
@@ -32,11 +32,15 @@ This needs to be done so as to access packages from Docker repository.
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+If you get the message "File '/usr/share/keyrings/docker-archive-keyring.gpg' exists. Overwrite? (y/N)", answer "y".
+
Verify that the key fingerprint is for example 9DC8 5822 9FC7 DD38 854A E2D8 8D81 803C 0EBF CD88
.. code-block:: bash
-sudo apt-key fingerprint 0EBFCD88
+sudo apt-key fingerprint
+
+to list the keys in your trusted.gpg keyring.
5. Find the entry in the table below which corresponds to your Ubuntu version. This determines
where APT will search for Docker packages.
@@ -110,9 +114,9 @@ We have a pre-configured environment variables (.env) stored at our resource arc
.. code-block:: bash
- cd /$DISTRIB
- tar -xvzf /tmp/resourses.tar.gz
- cp $DISTRIB/private/.env $DISTRIB/dataverse-docker/
+ cd /tmp
+ tar -xvzf /resourses.tar.gz
+ cp /tmp/distrib/private/.env $DISTRIB/dataverse-docker/
go to "Check that your dataverse instalation is accessible"
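Note: apt-key is deprecated on recent Ubuntu releases, so a keyring downloaded to /usr/share/keyrings can also be checked directly with gpg. A minimal sketch, assuming gpg is installed on the VM; the fingerprint is the example value quoted in the docs above.

    # Show the keys in the Docker keyring file and compare against the documented
    # example fingerprint 9DC8 5822 9FC7 DD38 854A E2D8 8D81 803C 0EBF CD88.
    gpg --show-keys --with-fingerprint /usr/share/keyrings/docker-archive-keyring.gpg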
From 5fd17019fda86dbec07cea71996b4ac11cb564c2 Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 7 Mar 2023 17:32:18 +0100
Subject: [PATCH 257/354] Update installation.rst
---
doc/installation.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/installation.rst b/doc/installation.rst
index 5444fad..d44f551 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -118,9 +118,9 @@ We have a pre-configured environment variables (.env) stored at our resource arc
tar -xvzf /resourses.tar.gz
cp /tmp/distrib/private/.env $DISTRIB/dataverse-docker/
-go to "Check that your dataverse instalation is accessible"
+go to "Check that your dataverse installation is accessible"
-The following variables need to be changed in .env
+The following variables (domain name) need to be changed in .env
.. code-block:: bash
From ee805116555e5f03df3e3966de81c8b662dbbfdc Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Thu, 9 Mar 2023 16:53:29 +0100
Subject: [PATCH 258/354] Update dataverse_footer.xhtml
---
distros/dataverse.no/modification/dataverse_footer.xhtml | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/modification/dataverse_footer.xhtml b/distros/dataverse.no/modification/dataverse_footer.xhtml
index 01be3ed..c5d8f02 100644
--- a/distros/dataverse.no/modification/dataverse_footer.xhtml
+++ b/distros/dataverse.no/modification/dataverse_footer.xhtml
@@ -31,7 +31,8 @@ DataverseNO is a national research data repository provided by partner institutions
-About DataverseNO | Contact
+About DataverseNO | Contact | Tilgjengelegheitserklæring (Accessibility Statement)
+
From d05d36684b162793074d2ff87c1c6ee0ecbd1516 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Mon, 13 Mar 2023 14:10:35 +0000
Subject: [PATCH 259/354] updated backup interval
---
distros/dataverse.no/init.d/cronjob/backupfiles.sql | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/backupfiles.sql b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
index dd92981..65151bc 100644
--- a/distros/dataverse.no/init.d/cronjob/backupfiles.sql
+++ b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
@@ -4,7 +4,7 @@
--select storageidentifier, CONCAT(authority, '/',split_part(identifier, '/', 1) , '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '1 day');
-select storageidentifier, CONCAT( get_authority(id), '/', get_identifier(id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')) from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '7 day');
+select storageidentifier, CONCAT( get_authority(id), '/', get_identifier(id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')) from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '1 day');
From a07b0d503e15e04b203e0d3c03d08a41ac9ae365 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 16 Mar 2023 13:51:49 +0000
Subject: [PATCH 260/354] added backup function and removed unused phppgadmin
---
distros/dataverse.no/docker-compose.yaml | 12 -----
.../init.d/205-backup-sql-fuctions.sh | 4 ++
.../init.d/cronjob/backup-fuction.sql | 50 +++++++++++++++++++
3 files changed, 54 insertions(+), 12 deletions(-)
create mode 100755 distros/dataverse.no/init.d/205-backup-sql-fuctions.sh
create mode 100644 distros/dataverse.no/init.d/cronjob/backup-fuction.sql
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 089b1d9..87452db 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -104,18 +104,6 @@ services:
- "traefik.http.routers.solr.tls=true"
- "traefik.http.routers.solr.tls.certresolver=myresolver"
- phppgadmin:
- networks:
- - traefik
- image: marvambass/phppgadmin
- container_name: phppgadmin
- restart: always
- environment:
- - DISABLE_TLS=disable
- - DB_HOST=postgres
- ports:
- - 8095:80
-
whoami:
networks:
- traefik
diff --git a/distros/dataverse.no/init.d/205-backup-sql-fuctions.sh b/distros/dataverse.no/init.d/205-backup-sql-fuctions.sh
new file mode 100755
index 0000000..17e049b
--- /dev/null
+++ b/distros/dataverse.no/init.d/205-backup-sql-fuctions.sh
@@ -0,0 +1,4 @@
+#!/bin/bash
+export PGPASSWORD=`cat /secrets/db/password`
+psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/backup-fuction.sql
+#select storageidentifier from dvobject where modificationtime>='2023-02-02';
diff --git a/distros/dataverse.no/init.d/cronjob/backup-fuction.sql b/distros/dataverse.no/init.d/cronjob/backup-fuction.sql
new file mode 100644
index 0000000..6500497
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/backup-fuction.sql
@@ -0,0 +1,50 @@
+drop function get_identifier;
+create function get_identifier(thisid int)
+returns text
+language plpgsql
+as
+$$
+declare
+ authoritystring TEXT;
+ v_sql text;
+begin
+ v_sql = 'select identifier from dvobject where id in (select owner_id from dvobject where id=' || thisid || ' order by id desc) order by id desc';
+ execute v_sql into authoritystring;
+ return authoritystring;
+end;
+$$;
+
+drop function get_ownership;
+create function get_ownership(thisid int)
+returns text
+language plpgsql
+as
+$$
+declare
+ authoritystring TEXT;
+ v_sql text;
+begin
+  v_sql = 'select concat_ws(''/'', authority, identifier) from dvobject where id in (select owner_id from dvobject where id=' || thisid || ' order by id desc) order by id desc';
+ execute v_sql into authoritystring;
+ return authoritystring;
+end;
+$$;
+
+
+
+drop function get_authority;
+create function get_authority(thisid int)
+returns text
+language plpgsql
+as
+$$
+declare
+ authoritystring TEXT;
+ v_sql text;
+begin
+ v_sql = 'select authority from dvobject where id in (select owner_id from dvobject where id=' || thisid || ' order by id desc) order by id desc';
+ execute v_sql into authoritystring;
+ return authoritystring;
+end;
+$$;
+
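Note: get_authority and get_identifier resolve the owning dataset's authority and identifier for a given dvobject id; backupfiles.sql concatenates them with the storage identifier to build the backup target path. A minimal sketch to exercise the helpers after 205-backup-sql-fuctions.sh has created them, reusing the container and credentials seen elsewhere in this series; the LIMIT only keeps the output small.

    # Illustrative check that the helper functions return sensible paths for a few DataFiles.
    docker exec --user postgres postgres \
      psql -U dataverse dataverse -c \
      "select id, get_authority(id), get_identifier(id) from dvobject where dtype='DataFile' limit 5;"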
From 200c88c4477fa57c26c35154240a6bc95733eb80 Mon Sep 17 00:00:00 2001
From: Philipp Conzett
Date: Fri, 17 Mar 2023 15:48:42 +0100
Subject: [PATCH 261/354] Update affiliations.csv with new records
---
.../init.d/affiliations/affiliations.csv | 280 ++++++++++--------
1 file changed, 156 insertions(+), 124 deletions(-)
diff --git a/distros/dataverse.no/init.d/affiliations/affiliations.csv b/distros/dataverse.no/init.d/affiliations/affiliations.csv
index 8879cb9..516ef55 100644
--- a/distros/dataverse.no/init.d/affiliations/affiliations.csv
+++ b/distros/dataverse.no/init.d/affiliations/affiliations.csv
@@ -1,124 +1,156 @@
-id,dvno_group_name,dvno_group_id,dvno_group_id_explicit,dvno_affiliation,dvno_abbreviation,bucketname_in_cloudian,bucketname_in_dataverseno,feide_id,feide_email,feide_name,feide_nb,feide_nn,feide_en,feide_se,ror_id,ror_name,ror_acronym,comments
-1,phs.no,phsno,&explicit/1-phsno,The Norwegian Police University College,PHS,,,54,phs.no,The Norwegian Police University College,Politihøgskolen,Politihøgskulen,The Norwegian Police University College,,https://ror.org/05486d596,Norwegian Police University College,PHS,
-2,uia.no,uiano,&explicit/1-uiano,University of Agder,UiA,2002-red-dataverseno-uia,cloudian-uia,55,uia.no,University of Agder,Universitetet i Agder,Universitetet i Agder,University of Agder,Agder Universitehta,https://ror.org/03x297z98,University of Agder,,
-3,nifu.no,nifuno,&explicit/1-nifuno,"Nordic Institute for Studies innovation, research and education",NIFU,,,58,nifu.no,NIFU,NIFU,NIFU,NIFU,,,,,
-4,hiof.no,hiofno,&explicit/1-hiofno,Østfold University College,HiØ,2002-red-dataverseno-hiof,cloudian-hiof,70,hiof.no,Østfold University College,Høgskolen i Østfold,Høgskulen i Østfold,Østfold University College,Østfold Allaskuvla,https://ror.org/04gf7fp41,Østfold University College,HiØ,
-5,aho.no,ahono,&explicit/1-ahono,The Oslo School of Architecture and Design,AHO,,,93,aho.no,The Oslo School of Architecture and Design,Arkitektur- og designhøgskolen i Oslo,Arkitektur- og designhøgskulen i Oslo,The Oslo School of Architecture and Design,,,,,
-6,cmi.no,cmino,&explicit/1-cmino,Chr. Michelsen Institute,CMI,,,96,cmi.no,Chr. Michelsen Institute,Chr. Michelsens Institutt,Chr. Michelsens Institutt,Chr. Michelsen Institute,,https://ror.org/02w7rbf39,Christian Michelsen Institute,CMI,
-7,mf.no,mfno,&explicit/1-mfno,"MF Norwegian School of Theology, Religion and Society",MF,2002-red-dataverseno-mf,cloudian-mf,100,mf.no,"MF Norwegian School of Theology, Religion and Society",MF vitenskapelig høyskole,MF vitenskapelig høyskole,"MF Norwegian School of Theology, Religion and Society",,https://ror.org/01qafy255,"MF Norwegian School of Theology, Religion and Society",,
-8,dmmh.no,dmmhno,&explicit/1-dmmhno,Queen Maud University College,DMMH,,,103,dmmh.no,Queen Maud University College,Dronning Mauds Minne Høgskole,Dronning Mauds Minne Høgskule,Queen Maud University College,,https://ror.org/043zemc40,Queen Maud University College,DMMH,
-9,nhh.no,nhhno,&explicit/1-nhhno,Norwegian School of Economics,NHH,,,119,nhh.no,Norwegian School of Economics,Norges Handelshøyskole,Norges Handelshøyskole,Norwegian School of Economics,,https://ror.org/04v53s997,Norwegian School of Economics,NHH,
-10,nla.no,nlano,&explicit/1-nlano,NLA University College,NLA,,,123,nla.no,NLA University College,NLA Høgskolen,NLA Høgskolen,NLA University College,NLA Høgskolen,https://ror.org/05fdt2q64,NLA University College,,
-11,npolar.no,npolarno,&explicit/1-npolarno,Norwegian Polar Institute,,,,124,,Norwegian Polar Institute,Norsk Polarinstitutt,,Norwegian Polar Institute,,https://ror.org/03avf6522,Norwegian Polar Institute,,
-12,nr.no,nrno,&explicit/1-nrno,Norwegian Computing Center,NR,,,125,nr.no,Norsk Regnesentral,Norsk Regnesentral,,,,https://ror.org/02gm7te43,Norwegian Computing Center,NR,
-13,sintef.no,sintefno,&explicit/1-sintefno,SINTEF,,,,131,sintef.no,SINTEF,SINTEF,SINTEF,SINTEF,,https://ror.org/01f677e56,SINTEF,,
-14,samiskhs.no,samiskhsno,&explicit/1-samiskhsno,Sámi allaskuvla – Sámi University College,,,,133,samas.no,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,Sámi allaskuvla – Samisk høgskule,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,https://ror.org/028ahgk39,Sámi University of Applied Sciences,,
-15,uib.no,uibno,&explicit/1-uibno,University of Bergen,UiB,2002-red-dataverseno-uib,cloudian-uib,137,uib.no,University of Bergen,Universitetet i Bergen,Universitetet i Bergen,University of Bergen,Bergen Universitehta,https://ror.org/03zga2b32,University of Bergen,,
-16,uio.no,uiono,&explicit/1-uiono,University of Oslo,UiO,2002-red-dataverseno-uio,cloudian-uio,138,uio.no,University of Oslo,Universitetet i Oslo,Universitetet i Oslo,University of Oslo,Oslo Universitehta,https://ror.org/01xtthb56,University of Oslo,UiO,
-17,uit.no,uitno,&explicit/1-uitno,UiT The Arctic University of Norway,UiT,2002-red-dataverseno-uit,cloudian-uit,139,uit.no,UiT The Arctic University of Norway,UiT Norges arktiske universitet,UiT Noregs arktiske universitet,UiT The Arctic University of Norway,UiT Norgga árktalaš universitehta,https://ror.org/00wge5k78,UiT The Arctic University of Norway,UiT,
-18,ntnu.no,ntnuno,&explicit/1-ntnuno,Norwegian University of Science and Technology,NTNU,2002-red-dataverseno-ntnu,cloudian-ntnu,140,ntnu.no,NTNU,NTNU,,NTNU,,https://ror.org/05xg72x27,Norwegian University of Science and Technology,NTNU,
-19,nina.no,ninano,&explicit/1-ninano,Norwegian Institute for Nature Research,NINA,,,144,nina.no,NINA,NINA,NINA,,,https://ror.org/04aha0598,Norwegian Institute for Nature Research,NINA,
-20,ngu.no,nguno,&explicit/1-nguno,Geological Survey of Norway,NGU,,,145,ngu.no,Geological Survey of Norway,Norges geologiske undersøkelse,Norges geologiske undersøkelse,Geological Survey of Norway,,,,,
-21,himolde.no,himoldeno,&explicit/1-himoldeno,Molde University College,HiM,,,158,himolde.no,Molde University College,Høgskolen i Molde,Høgskulen i Molde,Molde University College,Molde Allaskuvla,https://ror.org/00kxjcd28,Molde University College,HiM,
-22,nb.no,nbno,&explicit/1-nbno,National Library of Norway,NB,,,160,nb.no,National Library of Norway,Nasjonalbiblioteket,Nasjonalbiblioteket,National Library of Norway,,,,,
-23,uis.no,uisno,&explicit/1-uisno,University of Stavanger,UiS,2002-red-dataverseno-uis,cloudian-uis,163,uis.no,University of Stavanger,Universitetet i Stavanger,Universitetet i Stavanger,University of Stavanger,Stavanger Universitehta,https://ror.org/02qte9q33,University of Stavanger,UiS,
-24,hivolda.no,hivoldano,&explicit/1-hivoldano,Volda University College,HVO,,,165,hivolda.no,Volda University College,Høgskulen i Volda,Høgskulen i Volda,Volda University College,Volda Allaskuvla,https://ror.org/01eeqzy24,Volda University College,HVO,
-25,khio.no,khiono,&explicit/1-khiono,Oslo National Academy of the Arts,KhiO,,,195,khio.no,Oslo National Academy of the Arts,Kunsthøgskolen i Oslo,Kunsthøgskulen i Oslo,Oslo National Academy of the Arts,,https://ror.org/0543h9a62,Oslo National Academy of the Arts,,
-26,samfunnsforskning.no,samfunnsforskningno,&explicit/1-samfunnsforskningno,Institute for Social Research,IFS,,,197,samfunnsforskning.no,Institute for Social Research,Institutt for samfunnsforskning,,Institute for Social Research,,https://ror.org/05swz5441,Institute for Social Research,IFS,
-27,ldh.no,ldhno,&explicit/1-ldhno,Lovisenberg Diaconal University College,LDH,,,216,ldh.no,Lovisenberg diaconal university college,Lovisenberg diakonale høgskole,Lovisenberg diakonale høgskule,Lovisenberg diaconal university college,,https://ror.org/015rzvz05,Lovisenberg Diakonale Høgskole,LDH,
-28,fhi.no,fhino,&explicit/1-fhino,Norwegian Institute of Public Health,NIPH,,,310,fhi.no,Norwegian Institute of Public Health,Folkehelseinstituttet,,Norwegian Institute of Public Health,,https://ror.org/046nvst19,Norwegian Institute of Public Health,NIPH,
-29,nih.no,nihno,&explicit/1-nihno,Norwegian School of Sport Sciences,NSSS,,,323,nih.no,Norwegian School of Sport Sciences,Norges idrettshøgskole,Noregs idrettshøgskule,Norwegian School of Sport Sciences,,https://ror.org/045016w83,Norwegian School of Sport Sciences,NSSS,
-30,bi.no,bino,&explicit/1-bino,BI Norwegian Business School,BI,,,324,bi.no,BI Norwegian Business School,Handelshøyskolen BI,Handelshøyskolen BI,BI Norwegian Business School,,https://ror.org/03ez40v33,BI Norwegian Business School,,
-31,nmh.no,nmhno,&explicit/1-nmhno,Norwegian Academy of Music,NMH,,,325,nmh.no,Norwegian Academy of Music,Norges musikkhøgskole,Noregs musikkhøgskule,Norwegian Academy of Music,,https://ror.org/052dy9793,Norwegian Academy of Music,NMH,
-32,kristiania.no,kristianiano,&explicit/1-kristianiano,Kristiania University College,,,,17007,feide.egms.no,Kristiania University College,Høyskolen Kristiania,Høyskolen Kristiania,Kristiania University College,,https://ror.org/03gss5916,Campus Kristiania,,
-33,fhs.mil.no,fhsmilno,&explicit/1-fhsmilno,Norwegian Defence University College,NDUC,,,115267,mil.no,Norwegian Defence University College,Forsvarets høgskoler,Forsvarets høgskuler,Norwegian Defence University College,,https://ror.org/02vfz9j23,Norwegian Defence University College,NDUC,
-34,ansgarskolen.no,ansgarskolenno,&explicit/1-ansgarskolenno,Ansgar University College,,,,120177,ansgarhogskole.no,Ansgar University College,Ansgar høyskole,,Ansgar University College,,https://ror.org/05y8hw592,Ansgar Bibelskole,,
-35,oslomet.no,oslometno,&explicit/1-oslometno,OsloMet – Oslo Metropolitan University,HiOA,,,120186,oslomet.no,OsloMet – Oslo Metropolitan University,OsloMet – storbyuniversitetet,OsloMet – storbyuniversitetet,OsloMet – Oslo Metropolitan University,OsloMet – stuorragávpotuniversitehta,https://ror.org/04q12yn84,OsloMet – Oslo Metropolitan University,HiOA,
-36,nmbu.no,nmbuno,&explicit/1-nmbuno,Norwegian University of Life Sciences,NMBU,2002-red-dataverseno-nmbu,cloudian-nmbu,1777926,nmbu.no,Norwegian University of Life Sciences,Norges miljø- og biovitenskapelige universitet,Noregs miljø- og biovitenskapelige universitet,Norwegian University of Life Sciences,,https://ror.org/04a1mvv97,Norwegian University of Life Sciences,NMBU,
-37,nibio.no,nibiono,&explicit/1-nibiono,Norwegian Institute of Bioeconomy Research,NIBIO,,,2052113,nibio.no,Nibio,Nibio,,,,https://ror.org/04aah1z61,Norwegian Institute of Bioeconomy Research,NIBIO,
-38,vid.no,vidno,&explicit/1-vidno,VID Specialized University,VID,2002-red-dataverseno-vid,cloudian-vid,2064538,vid.no,VID Specialized University,VID vitenskapelige høgskole,VID vitenskapelige høgskule,VID Specialized University,,https://ror.org/0191b3351,VID Specialized University,VID,
-39,nord.no,nordno,&explicit/1-nordno,Nord University,,2002-red-dataverseno-nord,cloudian-nord,2066644,nord.no,Nord University,Nord universitet,Nord universitet,Nord University,,https://ror.org/030mwrt98,Nord University,,
-40,usn.no,usnno,&explicit/1-usnno,University of South-Eastern Norway,USN,,,2066647,usn.no,University of South-Eastern Norway,Universitetet i Sørøst-Norge,Universitetet i Søraust-Noreg,University of South-Eastern Norway,,https://ror.org/05ecg5h20,University of South-Eastern Norway,USN,
-41,hvl.no,hvlno,&explicit/1-hvlno,Western Norway University of Applied Sciences,HVL,2002-red-dataverseno-hvl,cloudian-hvl,2126357,hvl.no,Western Norway University of Applied Sciences,Høgskulen på Vestlandet,Høgskulen på Vestlandet,Western Norway University of Applied Sciences,,https://ror.org/05phns765,Western Norway University of Applied Sciences,HVL,
-42,nkvts.no,nkvtsno,&explicit/1-nkvtsno,Norwegian centre for violence and traumatic stress studies,NKVTS,,,2127917,nkvts.no,Norwegian centre for violence and traumatic stress studies,Nasjonalt kunnskapssenter om vold og traumatisk stress,,Norwegian centre for violence and traumatic stress studies,,https://ror.org/01p618c36,Norwegian Centre for Violence and Traumatic Stress Studies,NKVTS,
-43,inn.no,innno,&explicit/1-innno,Inland Norway University of Applied Sciences,,2002-red-dataverseno-inn,cloudian-inn,2128215,inn.no,Inland Norway University of Applied Sciences,Høgskolen i Innlandet,,Inland Norway University of Applied Sciences,,https://ror.org/02dx4dc92,Inland Norway University of Applied Sciences,,
-44,vetinst.no,vetinstno,&explicit/1-vetinstno,Norwegian Veterinary Institute,NVI,,,2217125,vetinst.no,Veterinærinstituttet,Veterinærinstituttet,,,,https://ror.org/05m6y3182,Norwegian Veterinary Institute,NVI,
-45,nubu.no,nubuno,&explicit/1-nubuno,NUBU - The Norwegian Center for Child Behavioral Development,NUBU,,,2217221,nubu.no,NUBU - The Norwegian Center for Child Behavioral Development,NUBU - Nasjonalt utviklingssenter for barn og unge,,NUBU - The Norwegian Center for Child Behavioral Development,,,,,
-46,hlsenteret.no,hlsenteretno,&explicit/1-hlsenteretno,The Norwegian Center for Holocaust and Minority Studies,,,,2217222,hlsenteret.no,The Norwegian Center for Holocaust and Minority Studies,Senter for studier av Holocaust og livssynsminoriteter,,The Norwegian Center for Holocaust and Minority Studies,,https://ror.org/03ppkyp25,Center for Studies of Holocaust and Religious Minorities,,
-47,met.no,metno,&explicit/1-metno,Norwegian Meteorological Institute,MET,,,2217341,,Meteorologisk Institutt,Meteorologisk Institutt,,,,https://ror.org/001n36p86,Norwegian Meteorological Institute,MET,
-48,simula.no,simulano,&explicit/1-simulano,Simula Research Laboratory,,,,2217477,simula.no,Simula,Simula,,,,https://ror.org/00vn06n10,Simula Research Laboratory,,
-49,agderforskning.no,agderforskningno,&explicit/1-agderforskningno,Agder Research,,,,,,,,,,,https://ror.org/02k3w5n89,Agder Research,,
-50,akvaplan.niva.no,akvaplannivano,&explicit/1-akvaplannivano,Akvaplan-niva,,,,,,,,,,,https://ror.org/03nrps502,Akvaplan-niva (Norway),,
-51,arbark.no,arbarkno,&explicit/1-arbarkno,Norwegian Labour Movement Archives and Library,,,,,,,,,,,https://ror.org/05x91m338,Norwegian Labour Movement Archives and Library,,
-52,cas.oslo.no,casoslono,&explicit/1-casoslono,Centre for Advanced Study,CAS,,,,,,,,,,https://ror.org/05rbhea42,Centre for Advanced Study,CAS,
-53,cicero.oslo.no,cicerooslono,&explicit/1-cicerooslono,CICERO Center for International Climate Research,CICERO,,,,,,,,,,https://ror.org/01gw5dy53,CICERO Center for International Climate Research,CICERO,
-54,cmr.no,cmrno,&explicit/1-cmrno,Christian Michelsen Research,CMR,,,,,,,,,,,,,Now part of NORCE Norwegian Research Centre
-55,dataverse.no,dataverseno,&explicit/1-dataverseno,,,2002-red-dataverseno-dvno,cloudian-dvno,,,,,,,,,,,The storage bucket 2002-red-dataverseno-dvno / cloudian-dvno is used for the root/top collection.
-56,DataverseNO Admin,DataverseNOAdmin,&explicit/1-DataverseNOAdmin,,,,,,,,,,,,,,,
-57,DataverseNO Curator,DvNOCurator,&explicit/1-DvNOCurator,,,,,,,,,,,,,,,
-58,DataverseNO Dataset Creator,DataverseNODatasetCreator,&explicit/1-DataverseNODatasetCreator,,,,,,,,,,,,,,,
-59,diakonova.no,diakonovano,&explicit/1-diakonovano,Diakonova,,,,,,,,,,,,,,
-60,fafo.no,fafono,&explicit/1-fafono,Fafo Foundation,,,,,,,,,,,https://ror.org/00ee9xb13,Fafo Foundation,,
-61,ffi.no,ffino,&explicit/1-ffino,Norwegian Defence Research Establishment,FFI,,,,,,,,,,https://ror.org/0098gnz32,Norwegian Defence Research Establishment,FFI,
-62,flymed.no,flymedno,&explicit/1-flymedno,Flymedisinsk institutt,,,,,,,,,,,,,,
-63,fni.no,fnino,&explicit/1-fnino,Fridtjof Nansen Institute,FNI,,,,,,,,,,https://ror.org/04ep2t954,Fridtjof Nansen Institute,FNI,
-64,genok.no,genokno,&explicit/1-genokno,GenØk – Centre for Biosafety,GenØk,,,,,,,,,,https://ror.org/027arfy53,GenØk,,
-65,hi.no,hino,&explicit/1-hino,Norwegian Institute of Marine Research,IMR,,,,,,,,,,https://ror.org/05vg74d16,Norwegian Institute of Marine Research,IMR,
-66,ife.no,ifeno,&explicit/1-ifeno,Institute for Energy Technology,IFE,,,,,,,,,,https://ror.org/02jqtg033,Institute for Energy Technology,IFE,
-67,iris.no,irisno,&explicit/1-irisno,International Research Institute of Stavanger,IRIS,,,,,,,,,,https://ror.org/0502t5s28,International Research Institute of Stavanger,IRIS,Now part of NORCE Norwegian Research Centre
-68,kifo.no,kifono,&explicit/1-kifono,"Institute for Church, Religion, and Worldview Research",KIFO,,,,,,,,,,https://ror.org/051p4t773,"Institute for Church, Religion, and Worldview Research",KIFO,
-69,kreftregisteret.no,kreftregisteretno,&explicit/1-kreftregisteretno,Cancer Registry of Norway,CRN,,,,,,,,,,https://ror.org/03sm1ej59,Cancer Registry of Norway,CRN,
-70,legeforeningen.no,legeforeningenno,&explicit/1-legeforeningenno,Den norske legeforening,,,,,,,,,,,,,,
-71,moreforsk.no,moreforskno,&explicit/1-moreforskno,Møreforsking,,,,,,,,,,,https://ror.org/02w4kss89,Møreforsking (Norway),,
-72,nersc.no,nerscno,&explicit/1-nerscno,Nansen Environmental and Remote Sensing Center,,,,,,,,,,,,,,
-73,nfms.no,nfmsno,&explicit/1-nfmsno,Aeromedical Center of Norway,,,,,,,,,,,,,,
-74,nforsk.no,nforskno,&explicit/1-nforskno,Nordland Research Institute,,,,,,,,,,,https://ror.org/02wvb2a30,Nordland Research Institute,,
-75,ngi.no,ngino,&explicit/1-ngino,Norwegian Geotechnical Institute,NGI,,,,,,,,,,https://ror.org/032ksge37,Norwegian Geotechnical Institute,NGI,
-76,niku.no,nikuno,&explicit/1-nikuno,Norwegian Institute for Cultural Heritage Research,NIKU,,,,,,,,,,https://ror.org/02xhrye98,Norwegian Institute for Cultural Heritage Research,NIKU,
-77,nilu.no,niluno,&explicit/1-niluno,Norwegian Institute for Air Research,NILU,,,,,,,,,,https://ror.org/00q7d9z06,Norwegian Institute for Air Research,NILU,
-78,niva.no,nivano,&explicit/1-nivano,Norwegian Institute for Water Research,NIVA,,,,,,,,,,https://ror.org/03hrf8236,Norwegian Institute for Water Research,NIVA,
-79,nlr.no,nlrno,&explicit/1-nlrno,Norsk Landbruksrådgiving,NLR,,,,,,,,,,https://ror.org/03c1zct07,Norsk Landbruksrådgiving,NLR,
-80,nobel.no,nobelno,&explicit/1-nobelno,Norwegian Nobel Institute,,,,,,,,,,,https://ror.org/055wgnw59,Norwegian Nobel Institute,,
-81,nofima.no,nofimano,&explicit/1-nofimano,Nofima,,2002-red-dataverseno-nofi,cloudian-nofima,,,,,,,,https://ror.org/02v1rsx93,Nofima,,
-82,norceresearch.no,norceresearchno,&explicit/1-norceresearchno,Norwegian Research Centre,NORCE,,,,,,,,,,https://ror.org/02gagpf75,Norwegian Research Centre,NORCE,
-83,norsar.no,norsarno,&explicit/1-norsarno,Norwegian Seismic Array,,,,,,,,,,,https://ror.org/02vw8cm83,Norsar,,
-84,norsok.no,norsokno,&explicit/1-norsokno,Norsk senter for økologisk landbruk,,,,,,,,,,,,,,
-85,norsus.no,norsusno,&explicit/1-norsusno,Norwegian Institute for Sustainability Research,NORSUS,,,,,,,,,,,,,
-86,norut.no,norutno,&explicit/1-norutno,Norut Northern Research Institute,Norut,,,,,,,,,,,,,Now part of NORCE Norwegian Research Centre
-87,nupi.no,nupino,&explicit/1-nupino,Norwegian Institute of International Affairs,NUPI,,,,,,,,,,https://ror.org/01pznaa94,Norwegian Institute of International Affairs,NUPI,
-88,ostfoldforskning.no,ostfoldforskningno,&explicit/1-ostfoldforskningno,Ostfold Research,,,,,,,,,,,https://ror.org/01vmqaq17,Ostfold Research (Norway),,Has changed name to
-89,ostforsk.no,ostforskno,&explicit/1-ostforskno,Eastern Norway Research Institute,ENRI,,,,,,,,,,https://ror.org/020deqr47,Eastern Norway Research Institute,ENRI,
-90,pfi.no,pfino,&explicit/1-pfino,Paper and Fibre Research Institute,PFI,,,,,,,,,,https://ror.org/053qb6g74,Paper and Fibre Research Institute,PFI,
-91,prio.org,prioorg,&explicit/1-prioorg,Peace Research Institute,PRIO,,,,,,,,,,https://ror.org/04dx54y73,Peace Research Institute,PRIO,
-92,risefr.no,risefrno,&explicit/1-risefrno,RISE Fire Research,RISE,,,,,,,,,,,,,
-93,ruralis.no,ruralisno,&explicit/1-ruralisno,Institute for Rural and Regional Research,CRR,,,,,,,,,,https://ror.org/0169gd037,Centre for Rural Research,CRR,Note: The ROR entry is not up to date.
-94,sik.no,sikno,&explicit/1-sikno,Centre for Intercultural Communication,SIK,,,,,,,,,,,,,Now part of VID
-95,snf.no,snfno,&explicit/1-snfno,Centre for Applied Research,,,,,,,,,,,,,,
-96,stami.no,stamino,&explicit/1-stamino,National Institute of Occupational Health,NIOH,,,,,,,,,,https://ror.org/04g3t6s80,National Institute of Occupational Health,NIOH,
-97,teknova.no,teknovano,&explicit/1-teknovano,Teknova,,,,,,,,,,,https://ror.org/02ekw8p73,Teknova,,Now part of NORCE Norwegian Research Centre
-98,tel-tek.no,tel-tekno,&explicit/1-tel-tekno,Tel-Tek,,,,,,,,,,,,,,Now part of SINTEF
-99,tfou.no,tfouno,&explicit/1-tfouno,Trøndelag Forskning og Utvikling,TFOU,,,,,,,,,,https://ror.org/01hw8wm79,Trøndelag Forskning og Utvikling (Norway),TFOU,Now part of SINTEF
-100,tisip.no,tisipno,&explicit/1-tisipno,TISIP,,,,,,,,,,,,,,
-101,tmforsk.no,tmforskno,&explicit/1-tmforskno,Telemark Research Institute,TRI,,,,,,,,,,https://ror.org/02jjgkb92,Telemark Research Institute,TRI,
-102,toi.no,toino,&explicit/1-toino,Institute of Transport Economics,TØI,,,,,,,,,,https://ror.org/04p2pa451,Institute of Transport Economics,TØI,
-103,treteknisk.no,tretekniskno,&explicit/1-tretekniskno,Norwegian Institute of Wood Technology,NTI,,,,,,,,,,https://ror.org/00d5qfz16,Norwegian Institute of Wood Technology,NTI,
-104,uni.no,unino,&explicit/1-unino,Uni Research,,,,,,,,,,,https://ror.org/016tr2j79,Uni Research (Norway),,Now part of NORCE Norwegian Research Centre
-105,vestforsk.no,vestforskno,&explicit/1-vestforskno,Western Norway Research Institute,WRNI,,,,,,,,,,https://ror.org/04z1q2j11,Vestlandsforsking,WRNI,
-106,westerdals.no,westerdalsno,&explicit/1-westerdalsno,"Westerdals Oslo School of Arts, Communication and Technology",,,,,,,,,,,https://ror.org/02re25503,"Westerdals Oslo School of Arts, Communication and Technology",,Now part of Kristiania
-107,unn.no,unnno,&explicit/1-unnno,University Hospital of North Norway,UNN,,,,,,,,,,https://ror.org/030v5kp38,University Hospital of North Norway,UNN,
-108,helse-vest.no,helse-vestno,&explicit/1-helse-vestno,Western Norway Regional Health Authority,,,,,,,,,,,https://ror.org/001212e83,Western Norway Regional Health Authority,,
-109,helse-forde.no,helse-fordeno,&explicit/1-helse-fordeno,Helse Førde,,,,,,,,,,,https://ror.org/05dzsmt79,Helse Førde,,
-110,helse-bergen.no,helse-bergenno,&explicit/1-helse-bergenno,Helse Bergen,,,,,,,,,,,,,,
-111,helse-fonna.no,helse-fonnano,&explicit/1-helse-fonnano,Helse Fonna,,,,,,,,,,,,,,
-112,sus.no,susno,&explicit/1-susno,Stavanger University Hospital,SUS,,,,,,,,,,https://ror.org/04zn72g03,Stavanger University Hospital,SUS,
-113,helse-midt.no,helse-midtno,&explicit/1-helse-midtno,Central Norway Regional Health Authority,,,,,,,,,,,https://ror.org/04t838f48,Central Norway Regional Health Authority,,
-114,helse-mr.no,helse-mrno,&explicit/1-helse-mrno,Helse Møre og Romsdal,,,,,,,,,,,https://ror.org/05ka2ew29,Helse Møre og Romsdal,,
-115,stolav.no,stolavno,&explicit/1-stolavno,St Olav's University Hospital,,,,,,,,,,,https://ror.org/01a4hbq44,St Olav's University Hospital,,
-116,hnt.no,hntno,&explicit/1-hntno,Helse Nord-Trøndelag,,,,,,,,,,,,,,
-117,helse-nord.no,helse-nordno,&explicit/1-helse-nordno,Northern Norway Regional Health Authority,,,,,,,,,,,https://ror.org/05f6c0c45,Northern Norway Regional Health Authority,,
-118,helgelandssykehuset.no,helgelandssykehusetno,&explicit/1-helgelandssykehusetno,Helgelandssykehuset,,,,,,,,,,,,,,
-119,finnmarkssykehuset.no,finnmarkssykehusetno,&explicit/1-finnmarkssykehusetno,Finnmarkssykehuset,,,,,,,,,,,https://ror.org/04z1ebj23,Finnmarkssykehuset,,
-120,nordlandssykehuset.no,nordlandssykehusetno,&explicit/1-nordlandssykehusetno,Nordland Hospital Trust,,,,,,,,,,,https://ror.org/04wjd1a07,Nordland Hospital Trust,,
-121,helse-sorost.no,helse-sorostno,&explicit/1-helse-sorostno,Southern and Eastern Norway Regional Health Authority,,,,,,,,,,,https://ror.org/02qx2s478,Southern and Eastern Norway Regional Health Authority,,
-122,ahus.no,ahusno,&explicit/1-ahusno,Akershus University Hospital,Ahus,,,,,,,,,,https://ror.org/0331wat71,Akershus University Hospital,,
-123,oslo-universitetssykehus.no,oslo-universitetssykehusno,&explicit/1-oslo-universitetssykehusno,Oslo University Hospital,,,,,,,,,,,https://ror.org/00j9c2840,Oslo University Hospital,,
+id,dvno_group_name,dvno_group_id,dvno_group_id_explicit,dvno_affiliation,dvno_abbreviation,dvno_alias,bucketname_in_cloudian,bucketname_in_dataverseno,feide_id,feide_email,feide_name,feide_nb,feide_nn,feide_en,feide_se,ror_id,ror_name,ror_acronym,comments
+1,phs.no,phsno,&explicit/1-phsno,The Norwegian Police University College,PHS,,,,54,phs.no,The Norwegian Police University College,Politihøgskolen,Politihøgskulen,The Norwegian Police University College,,https://ror.org/05486d596,Norwegian Police University College,PHS,
+2,uia.no,uiano,&explicit/1-uiano,University of Agder,UiA,uia,2002-red-dvno,cloudian-dvno,55,uia.no,University of Agder,Universitetet i Agder,Universitetet i Agder,University of Agder,Agder Universitehta,https://ror.org/03x297z98,University of Agder,,
+3,nifu.no,nifuno,&explicit/1-nifuno,"Nordic Institute for Studies innovation, research and education",NIFU,,,,58,nifu.no,NIFU,NIFU,NIFU,NIFU,,,,,
+4,hiof.no,hiofno,&explicit/1-hiofno,Østfold University College,HiØ,hiof,2002-red-dvno,cloudian-dvno,70,hiof.no,Østfold University College,Høgskolen i Østfold,Høgskulen i Østfold,Østfold University College,Østfold Allaskuvla,https://ror.org/04gf7fp41,Østfold University College,HiØ,
+5,aho.no,ahono,&explicit/1-ahono,The Oslo School of Architecture and Design,AHO,,,,93,aho.no,The Oslo School of Architecture and Design,Arkitektur- og designhøgskolen i Oslo,Arkitektur- og designhøgskulen i Oslo,The Oslo School of Architecture and Design,,,,,
+6,cmi.no,cmino,&explicit/1-cmino,Chr. Michelsen Institute,CMI,,,,96,cmi.no,Chr. Michelsen Institute,Chr. Michelsens Institutt,Chr. Michelsens Institutt,Chr. Michelsen Institute,,https://ror.org/02w7rbf39,Christian Michelsen Institute,CMI,
+7,mf.no,mfno,&explicit/1-mfno,"MF Norwegian School of Theology, Religion and Society",MF,mf,2002-red-dvno,cloudian-dvno,100,mf.no,"MF Norwegian School of Theology, Religion and Society",MF vitenskapelig høyskole,MF vitenskapelig høyskole,"MF Norwegian School of Theology, Religion and Society",,https://ror.org/01qafy255,"MF Norwegian School of Theology, Religion and Society",,
+8,dmmh.no,dmmhno,&explicit/1-dmmhno,Queen Maud University College,DMMH,,,,103,dmmh.no,Queen Maud University College,Dronning Mauds Minne Høgskole,Dronning Mauds Minne Høgskule,Queen Maud University College,,https://ror.org/043zemc40,Queen Maud University College,DMMH,
+9,nhh.no,nhhno,&explicit/1-nhhno,Norwegian School of Economics,NHH,,,,119,nhh.no,Norwegian School of Economics,Norges Handelshøyskole,Norges Handelshøyskole,Norwegian School of Economics,,https://ror.org/04v53s997,Norwegian School of Economics,NHH,
+10,nla.no,nlano,&explicit/1-nlano,NLA University College,NLA,,,,123,nla.no,NLA University College,NLA Høgskolen,NLA Høgskolen,NLA University College,NLA Høgskolen,https://ror.org/05fdt2q64,NLA University College,,
+11,npolar.no,npolarno,&explicit/1-npolarno,Norwegian Polar Institute,NPI,,,,124,,Norwegian Polar Institute,Norsk Polarinstitutt,,Norwegian Polar Institute,,https://ror.org/03avf6522,Norwegian Polar Institute,,
+12,nr.no,nrno,&explicit/1-nrno,Norwegian Computing Center,NR,,,,125,nr.no,Norsk Regnesentral,Norsk Regnesentral,,,,https://ror.org/02gm7te43,Norwegian Computing Center,NR,
+13,sintef.no,sintefno,&explicit/1-sintefno,SINTEF,SINTEF,,,,131,sintef.no,SINTEF,SINTEF,SINTEF,SINTEF,,https://ror.org/01f677e56,SINTEF,,
+14,samiskhs.no,samiskhsno,&explicit/1-samiskhsno,Sámi allaskuvla – Sámi University College,Sami,,,,133,samas.no,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,Sámi allaskuvla – Samisk høgskule,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,https://ror.org/028ahgk39,Sámi University of Applied Sciences,,
+15,uib.no,uibno,&explicit/1-uibno,University of Bergen,UiB,uib,2002-red-dvno,cloudian-dvno,137,uib.no,University of Bergen,Universitetet i Bergen,Universitetet i Bergen,University of Bergen,Bergen Universitehta,https://ror.org/03zga2b32,University of Bergen,,
+16,uio.no,uiono,&explicit/1-uiono,University of Oslo,UiO,uio,2002-red-dvno,cloudian-dvno,138,uio.no,University of Oslo,Universitetet i Oslo,Universitetet i Oslo,University of Oslo,Oslo Universitehta,https://ror.org/01xtthb56,University of Oslo,UiO,
+17,uit.no,uitno,&explicit/1-uitno,UiT The Arctic University of Norway,UiT,uit,2002-red-dvno,cloudian-dvno,139,uit.no,UiT The Arctic University of Norway,UiT Norges arktiske universitet,UiT Noregs arktiske universitet,UiT The Arctic University of Norway,UiT Norgga árktalaš universitehta,https://ror.org/00wge5k78,UiT The Arctic University of Norway,UiT,
+18,ntnu.no,ntnuno,&explicit/1-ntnuno,Norwegian University of Science and Technology,NTNU,ntnu,2002-red-dvno,cloudian-dvno,140,ntnu.no,NTNU,NTNU,,NTNU,,https://ror.org/05xg72x27,Norwegian University of Science and Technology,NTNU,
+19,nina.no,ninano,&explicit/1-ninano,Norwegian Institute for Nature Research,NINA,,,,144,nina.no,NINA,NINA,NINA,,,https://ror.org/04aha0598,Norwegian Institute for Nature Research,NINA,
+20,ngu.no,nguno,&explicit/1-nguno,Geological Survey of Norway,NGU,,,,145,ngu.no,Geological Survey of Norway,Norges geologiske undersøkelse,Norges geologiske undersøkelse,Geological Survey of Norway,,,,,
+21,himolde.no,himoldeno,&explicit/1-himoldeno,Molde University College,HiM,,,,158,himolde.no,Molde University College,Høgskolen i Molde,Høgskulen i Molde,Molde University College,Molde Allaskuvla,https://ror.org/00kxjcd28,Molde University College,HiM,
+22,nb.no,nbno,&explicit/1-nbno,National Library of Norway,NB,,,,160,nb.no,National Library of Norway,Nasjonalbiblioteket,Nasjonalbiblioteket,National Library of Norway,,,,,
+23,uis.no,uisno,&explicit/1-uisno,University of Stavanger,UiS,uis,2002-red-dvno,cloudian-dvno,163,uis.no,University of Stavanger,Universitetet i Stavanger,Universitetet i Stavanger,University of Stavanger,Stavanger Universitehta,https://ror.org/02qte9q33,University of Stavanger,UiS,
+24,hivolda.no,hivoldano,&explicit/1-hivoldano,Volda University College,HVO,,,,165,hivolda.no,Volda University College,Høgskulen i Volda,Høgskulen i Volda,Volda University College,Volda Allaskuvla,https://ror.org/01eeqzy24,Volda University College,HVO,
+25,khio.no,khiono,&explicit/1-khiono,Oslo National Academy of the Arts,KhiO,,,,195,khio.no,Oslo National Academy of the Arts,Kunsthøgskolen i Oslo,Kunsthøgskulen i Oslo,Oslo National Academy of the Arts,,https://ror.org/0543h9a62,Oslo National Academy of the Arts,,
+26,samfunnsforskning.no,samfunnsforskningno,&explicit/1-samfunnsforskningno,Institute for Social Research,IFS,,,,197,samfunnsforskning.no,Institute for Social Research,Institutt for samfunnsforskning,,Institute for Social Research,,https://ror.org/05swz5441,Institute for Social Research,IFS,
+27,ldh.no,ldhno,&explicit/1-ldhno,Lovisenberg Diaconal University College,LDH,,,,216,ldh.no,Lovisenberg diaconal university college,Lovisenberg diakonale høgskole,Lovisenberg diakonale høgskule,Lovisenberg diaconal university college,,https://ror.org/015rzvz05,Lovisenberg Diakonale Høgskole,LDH,
+28,fhi.no,fhino,&explicit/1-fhino,Norwegian Institute of Public Health,NIPH,,,,310,fhi.no,Norwegian Institute of Public Health,Folkehelseinstituttet,,Norwegian Institute of Public Health,,https://ror.org/046nvst19,Norwegian Institute of Public Health,NIPH,
+29,nih.no,nihno,&explicit/1-nihno,Norwegian School of Sport Sciences,NSSS,,,,323,nih.no,Norwegian School of Sport Sciences,Norges idrettshøgskole,Noregs idrettshøgskule,Norwegian School of Sport Sciences,,https://ror.org/045016w83,Norwegian School of Sport Sciences,NSSS,
+30,bi.no,bino,&explicit/1-bino,BI Norwegian Business School,BI,,,,324,bi.no,BI Norwegian Business School,Handelshøyskolen BI,Handelshøyskolen BI,BI Norwegian Business School,,https://ror.org/03ez40v33,BI Norwegian Business School,,
+31,nmh.no,nmhno,&explicit/1-nmhno,Norwegian Academy of Music,NMH,,,,325,nmh.no,Norwegian Academy of Music,Norges musikkhøgskole,Noregs musikkhøgskule,Norwegian Academy of Music,,https://ror.org/052dy9793,Norwegian Academy of Music,NMH,
+32,kristiania.no,kristianiano,&explicit/1-kristianiano,Kristiania University College,Kristiania,,,,17007,feide.egms.no,Kristiania University College,Høyskolen Kristiania,Høyskolen Kristiania,Kristiania University College,,https://ror.org/03gss5916,Campus Kristiania,,
+33,fhs.mil.no,fhsmilno,&explicit/1-fhsmilno,Norwegian Defence University College,NDUC,,,,115267,mil.no,Norwegian Defence University College,Forsvarets høgskoler,Forsvarets høgskuler,Norwegian Defence University College,,https://ror.org/02vfz9j23,Norwegian Defence University College,NDUC,
+34,ansgarskolen.no,ansgarskolenno,&explicit/1-ansgarskolenno,Ansgar University College,Ansgar,,,,120177,ansgarhogskole.no,Ansgar University College,Ansgar høyskole,,Ansgar University College,,https://ror.org/05y8hw592,Ansgar Bibelskole,,
+35,oslomet.no,oslometno,&explicit/1-oslometno,OsloMet – Oslo Metropolitan University,OsloMet,,,,120186,oslomet.no,OsloMet – Oslo Metropolitan University,OsloMet – storbyuniversitetet,OsloMet – storbyuniversitetet,OsloMet – Oslo Metropolitan University,OsloMet – stuorragávpotuniversitehta,https://ror.org/04q12yn84,OsloMet – Oslo Metropolitan University,HiOA,
+36,nmbu.no,nmbuno,&explicit/1-nmbuno,Norwegian University of Life Sciences,NMBU,nmbu,2002-red-dvno,cloudian-dvno,1777926,nmbu.no,Norwegian University of Life Sciences,Norges miljø- og biovitenskapelige universitet,Noregs miljø- og biovitenskapelige universitet,Norwegian University of Life Sciences,,https://ror.org/04a1mvv97,Norwegian University of Life Sciences,NMBU,
+37,nibio.no,nibiono,&explicit/1-nibiono,Norwegian Institute of Bioeconomy Research,NIBIO,,,,2052113,nibio.no,Nibio,Nibio,,,,https://ror.org/04aah1z61,Norwegian Institute of Bioeconomy Research,NIBIO,
+38,vid.no,vidno,&explicit/1-vidno,VID Specialized University,VID,vid,2002-red-dvno,cloudian-dvno,2064538,vid.no,VID Specialized University,VID vitenskapelige høgskole,VID vitenskapelige høgskule,VID Specialized University,,https://ror.org/0191b3351,VID Specialized University,VID,
+39,nord.no,nordno,&explicit/1-nordno,Nord University,NORD,nord,2002-red-dvno,cloudian-dvno,2066644,nord.no,Nord University,Nord universitet,Nord universitet,Nord University,,https://ror.org/030mwrt98,Nord University,,
+40,usn.no,usnno,&explicit/1-usnno,University of South-Eastern Norway,USN,,,,2066647,usn.no,University of South-Eastern Norway,Universitetet i Sørøst-Norge,Universitetet i Søraust-Noreg,University of South-Eastern Norway,,https://ror.org/05ecg5h20,University of South-Eastern Norway,USN,
+41,hvl.no,hvlno,&explicit/1-hvlno,Western Norway University of Applied Sciences,HVL,hvl,2002-red-dvno,cloudian-dvno,2126357,hvl.no,Western Norway University of Applied Sciences,Høgskulen på Vestlandet,Høgskulen på Vestlandet,Western Norway University of Applied Sciences,,https://ror.org/05phns765,Western Norway University of Applied Sciences,HVL,
+42,nkvts.no,nkvtsno,&explicit/1-nkvtsno,Norwegian centre for violence and traumatic stress studies,NKVTS,,,,2127917,nkvts.no,Norwegian centre for violence and traumatic stress studies,Nasjonalt kunnskapssenter om vold og traumatisk stress,,Norwegian centre for violence and traumatic stress studies,,https://ror.org/01p618c36,Norwegian Centre for Violence and Traumatic Stress Studies,NKVTS,
+43,inn.no,innno,&explicit/1-innno,Inland Norway University of Applied Sciences,INN,inn,2002-red-dvno,cloudian-dvno,2128215,inn.no,Inland Norway University of Applied Sciences,Høgskolen i Innlandet,,Inland Norway University of Applied Sciences,,https://ror.org/02dx4dc92,Inland Norway University of Applied Sciences,,
+44,vetinst.no,vetinstno,&explicit/1-vetinstno,Norwegian Veterinary Institute,NVI,,,,2217125,vetinst.no,Veterinærinstituttet,Veterinærinstituttet,,,,https://ror.org/05m6y3182,Norwegian Veterinary Institute,NVI,
+45,nubu.no,nubuno,&explicit/1-nubuno,NUBU - The Norwegian Center for Child Behavioral Development,NUBU,,,,2217221,nubu.no,NUBU - The Norwegian Center for Child Behavioral Development,NUBU - Nasjonalt utviklingssenter for barn og unge,,NUBU - The Norwegian Center for Child Behavioral Development,,,,,
+46,hlsenteret.no,hlsenteretno,&explicit/1-hlsenteretno,The Norwegian Center for Holocaust and Minority Studies,HLS,,,,2217222,hlsenteret.no,The Norwegian Center for Holocaust and Minority Studies,Senter for studier av Holocaust og livssynsminoriteter,,The Norwegian Center for Holocaust and Minority Studies,,https://ror.org/03ppkyp25,Center for Studies of Holocaust and Religious Minorities,,
+47,met.no,metno,&explicit/1-metno,Norwegian Meteorological Institute,MET,,,,2217341,,Meteorologisk Institutt,Meteorologisk Institutt,,,,https://ror.org/001n36p86,Norwegian Meteorological Institute,MET,
+48,simula.no,simulano,&explicit/1-simulano,Simula Research Laboratory,Simula,,,,2217477,simula.no,Simula,Simula,,,,https://ror.org/00vn06n10,Simula Research Laboratory,,
+49,agderforskning.no,agderforskningno,&explicit/1-agderforskningno,Agder Research,Agder,,,,,,,,,,,https://ror.org/02k3w5n89,Agder Research,,
+50,akvaplan.niva.no,akvaplannivano,&explicit/1-akvaplannivano,Akvaplan-niva,Akvaplan,,,,,,,,,,,https://ror.org/03nrps502,Akvaplan-niva (Norway),,
+51,arbark.no,arbarkno,&explicit/1-arbarkno,Norwegian Labour Movement Archives and Library,ARBARK,,,,,,,,,,,https://ror.org/05x91m338,Norwegian Labour Movement Archives and Library,,
+52,cas.oslo.no,casoslono,&explicit/1-casoslono,Centre for Advanced Study,CAS,,,,,,,,,,,https://ror.org/05rbhea42,Centre for Advanced Study,CAS,
+53,cicero.oslo.no,cicerooslono,&explicit/1-cicerooslono,CICERO Center for International Climate Research,CICERO,,,,,,,,,,,https://ror.org/01gw5dy53,CICERO Center for International Climate Research,CICERO,
+54,cmr.no,cmrno,&explicit/1-cmrno,Christian Michelsen Research,CMR,,,,,,,,,,,,,,Now part of NORCE Norwegian Research Centre
+55,dataverse.no,dataverseno,&explicit/1-dataverseno,,_Ikkje-partnarar,root,2002-red-dvno,cloudian-dvno,,,,,,,,,,,The storage bucket 2002-red-dataverseno-dvno / cloudian-dvno is used for the root/top collection.
+56,DataverseNO Admin,DataverseNOAdmin,&explicit/1-DataverseNOAdmin,,,,,,,,,,,,,,,,
+57,DataverseNO Curator,DvNOCurator,&explicit/1-DvNOCurator,,,,,,,,,,,,,,,,
+58,DataverseNO Dataset Creator,DataverseNODatasetCreator,&explicit/1-DataverseNODatasetCreator,,,,,,,,,,,,,,,,
+59,diakonova.no,diakonovano,&explicit/1-diakonovano,Diakonova,Diakonova,,,,,,,,,,,,,,
+60,fafo.no,fafono,&explicit/1-fafono,Fafo Foundation,Fafo,,,,,,,,,,,https://ror.org/00ee9xb13,Fafo Foundation,,
+61,ffi.no,ffino,&explicit/1-ffino,Norwegian Defence Research Establishment,FFI,,,,,,,,,,,https://ror.org/0098gnz32,Norwegian Defence Research Establishment,FFI,
+62,flymed.no,flymedno,&explicit/1-flymedno,Flymedisinsk institutt,Flymed,,,,,,,,,,,,,,
+63,fni.no,fnino,&explicit/1-fnino,Fridtjof Nansen Institute,FNI,,,,,,,,,,,https://ror.org/04ep2t954,Fridtjof Nansen Institute,FNI,
+64,genok.no,genokno,&explicit/1-genokno,GenØk – Centre for Biosafety,GenØk,,,,,,,,,,,https://ror.org/027arfy53,GenØk,,
+65,hi.no,hino,&explicit/1-hino,Norwegian Institute of Marine Research,IMR,,,,,,,,,,,https://ror.org/05vg74d16,Norwegian Institute of Marine Research,IMR,
+66,ife.no,ifeno,&explicit/1-ifeno,Institute for Energy Technology,IFE,,,,,,,,,,,https://ror.org/02jqtg033,Institute for Energy Technology,IFE,
+67,iris.no,irisno,&explicit/1-irisno,International Research Institute of Stavanger,IRIS,,,,,,,,,,,https://ror.org/0502t5s28,International Research Institute of Stavanger,IRIS,Now part of NORCE Norwegian Research Centre
+68,kifo.no,kifono,&explicit/1-kifono,"Institute for Church, Religion, and Worldview Research",KIFO,,,,,,,,,,,https://ror.org/051p4t773,"Institute for Church, Religion, and Worldview Research",KIFO,
+69,kreftregisteret.no,kreftregisteretno,&explicit/1-kreftregisteretno,Cancer Registry of Norway,CRN,,,,,,,,,,,https://ror.org/03sm1ej59,Cancer Registry of Norway,CRN,
+70,legeforeningen.no,legeforeningenno,&explicit/1-legeforeningenno,Den norske legeforening,Lege,,,,,,,,,,,,,,
+71,moreforsk.no,moreforskno,&explicit/1-moreforskno,Møreforsking,Møre,,,,,,,,,,,https://ror.org/02w4kss89,Møreforsking (Norway),,
+72,nersc.no,nerscno,&explicit/1-nerscno,Nansen Environmental and Remote Sensing Center,NERSC,,,,,,,,,,,,,,
+73,nfms.no,nfmsno,&explicit/1-nfmsno,Aeromedical Center of Norway,NMFS,,,,,,,,,,,,,,
+74,nforsk.no,nforskno,&explicit/1-nforskno,Nordland Research Institute,Nordland,,,,,,,,,,,https://ror.org/02wvb2a30,Nordland Research Institute,,
+75,ngi.no,ngino,&explicit/1-ngino,Norwegian Geotechnical Institute,NGI,,,,,,,,,,,https://ror.org/032ksge37,Norwegian Geotechnical Institute,NGI,
+76,niku.no,nikuno,&explicit/1-nikuno,Norwegian Institute for Cultural Heritage Research,NIKU,,,,,,,,,,,https://ror.org/02xhrye98,Norwegian Institute for Cultural Heritage Research,NIKU,
+77,nilu.no,niluno,&explicit/1-niluno,Norwegian Institute for Air Research,NILU,,,,,,,,,,,https://ror.org/00q7d9z06,Norwegian Institute for Air Research,NILU,
+78,niva.no,nivano,&explicit/1-nivano,Norwegian Institute for Water Research,NIVA,,,,,,,,,,,https://ror.org/03hrf8236,Norwegian Institute for Water Research,NIVA,
+79,nlr.no,nlrno,&explicit/1-nlrno,Norsk Landbruksrådgiving,NLR,,,,,,,,,,,https://ror.org/03c1zct07,Norsk Landbruksrådgiving,NLR,
+80,nobel.no,nobelno,&explicit/1-nobelno,Norwegian Nobel Institute,Nobel,,,,,,,,,,,https://ror.org/055wgnw59,Norwegian Nobel Institute,,
+81,nofima.no,nofimano,&explicit/1-nofimano,Nofima,Nofima,nofima,2002-red-dvno,cloudian-dvno,,,,,,,,https://ror.org/02v1rsx93,Nofima,,
+82,norceresearch.no,norceresearchno,&explicit/1-norceresearchno,Norwegian Research Centre,NORCE,,,,,,,,,,,https://ror.org/02gagpf75,Norwegian Research Centre,NORCE,
+83,norsar.no,norsarno,&explicit/1-norsarno,Norwegian Seismic Array,NORSAR,,,,,,,,,,,https://ror.org/02vw8cm83,Norsar,,
+84,norsok.no,norsokno,&explicit/1-norsokno,Norwegian Centre for Organic Agriculture,NORSØK,,,,,,,,,,,,,,
+85,norsus.no,norsusno,&explicit/1-norsusno,Norwegian Institute for Sustainability Research,NORSUS,,,,,,,,,,,,,,
+86,norut.no,norutno,&explicit/1-norutno,Norut Northern Research Institute,Norut,,,,,,,,,,,,,,Now part of NORCE Norwegian Research Centre
+87,nupi.no,nupino,&explicit/1-nupino,Norwegian Institute of International Affairs,NUPI,,,,,,,,,,,https://ror.org/01pznaa94,Norwegian Institute of International Affairs,NUPI,
+88,ostfoldforskning.no,ostfoldforskningno,&explicit/1-ostfoldforskningno,Ostfold Research,Østfold,,,,,,,,,,,https://ror.org/01vmqaq17,Ostfold Research (Norway),,Has changed name to
+89,ostforsk.no,ostforskno,&explicit/1-ostforskno,Eastern Norway Research Institute,ENRI,,,,,,,,,,,https://ror.org/020deqr47,Eastern Norway Research Institute,ENRI,
+90,pfi.no,pfino,&explicit/1-pfino,Paper and Fibre Research Institute,PFI,,,,,,,,,,,https://ror.org/053qb6g74,Paper and Fibre Research Institute,PFI,
+91,prio.org,prioorg,&explicit/1-prioorg,Peace Research Institute,PRIO,,,,,,,,,,,https://ror.org/04dx54y73,Peace Research Institute,PRIO,
+92,risefr.no,risefrno,&explicit/1-risefrno,RISE Fire Research,RISE,,,,,,,,,,,,,,
+93,ruralis.no,ruralisno,&explicit/1-ruralisno,Institute for Rural and Regional Research,CRR,,,,,,,,,,,https://ror.org/0169gd037,Centre for Rural Research,CRR,Note: The ROR entry is not up to date.
+94,sik.no,sikno,&explicit/1-sikno,Centre for Intercultural Communication,SIK,,,,,,,,,,,,,,Now part of VID
+95,snf.no,snfno,&explicit/1-snfno,Centre for Applied Research,SNF,,,,,,,,,,,,,,
+96,stami.no,stamino,&explicit/1-stamino,National Institute of Occupational Health,NIOH,,,,,,,,,,,https://ror.org/04g3t6s80,National Institute of Occupational Health,NIOH,
+97,teknova.no,teknovano,&explicit/1-teknovano,Teknova,Teknova,,,,,,,,,,,https://ror.org/02ekw8p73,Teknova,,Now part of NORCE Norwegian Research Centre
+98,tel-tek.no,tel-tekno,&explicit/1-tel-tekno,Tel-Tek,Tel-Tek,,,,,,,,,,,,,,Now part of SINTEF
+99,tfou.no,tfouno,&explicit/1-tfouno,Trøndelag Forskning og Utvikling,TFOU,,,,,,,,,,,https://ror.org/01hw8wm79,Trøndelag Forskning og Utvikling (Norway),TFOU,Now part of SINTEF
+100,tisip.no,tisipno,&explicit/1-tisipno,TISIP,TISIP,,,,,,,,,,,,,,
+101,tmforsk.no,tmforskno,&explicit/1-tmforskno,Telemark Research Institute,TRI,,,,,,,,,,,https://ror.org/02jjgkb92,Telemark Research Institute,TRI,
+102,toi.no,toino,&explicit/1-toino,Institute of Transport Economics,TØI,,,,,,,,,,,https://ror.org/04p2pa451,Institute of Transport Economics,TØI,
+103,treteknisk.no,tretekniskno,&explicit/1-tretekniskno,Norwegian Institute of Wood Technology,NTI,,,,,,,,,,,https://ror.org/00d5qfz16,Norwegian Institute of Wood Technology,NTI,
+104,uni.no,unino,&explicit/1-unino,Uni Research,Uni,,,,,,,,,,,https://ror.org/016tr2j79,Uni Research (Norway),,Now part of NORCE Norwegian Research Centre
+105,vestforsk.no,vestforskno,&explicit/1-vestforskno,Western Norway Research Institute,WRNI,,,,,,,,,,,https://ror.org/04z1q2j11,Vestlandsforsking,WRNI,
+106,westerdals.no,westerdalsno,&explicit/1-westerdalsno,"Westerdals Oslo School of Arts, Communication and Technology",Westerdals,,,,,,,,,,,https://ror.org/02re25503,"Westerdals Oslo School of Arts, Communication and Technology",,Now part of Kristiania
+107,unn.no,unnno,&explicit/1-unnno,University Hospital of North Norway,UNN,,,,,,,,,,,https://ror.org/030v5kp38,University Hospital of North Norway,UNN,
+108,helse-vest.no,helse-vestno,&explicit/1-helse-vestno,Western Norway Regional Health Authority,Helse Vest,,,,,,,,,,,https://ror.org/001212e83,Western Norway Regional Health Authority,,
+109,helse-forde.no,helse-fordeno,&explicit/1-helse-fordeno,Helse Førde,Helse Førde,,,,,,,,,,,https://ror.org/05dzsmt79,Helse Førde,,
+110,helse-bergen.no,helse-bergenno,&explicit/1-helse-bergenno,Helse Bergen,Helse Bergen,,,,,,,,,,,,,,
+111,helse-fonna.no,helse-fonnano,&explicit/1-helse-fonnano,Helse Fonna,Helse Fonna,,,,,,,,,,,,,,
+112,sus.no,susno,&explicit/1-susno,Stavanger University Hospital,SUS,,,,,,,,,,,https://ror.org/04zn72g03,Stavanger University Hospital,SUS,
+113,helse-midt.no,helse-midtno,&explicit/1-helse-midtno,Central Norway Regional Health Authority,Helse Midt,,,,,,,,,,,https://ror.org/04t838f48,Central Norway Regional Health Authority,,
+114,helse-mr.no,helse-mrno,&explicit/1-helse-mrno,Helse Møre og Romsdal,Helse MR,,,,,,,,,,,https://ror.org/05ka2ew29,Helse Møre og Romsdal,,
+115,stolav.no,stolavno,&explicit/1-stolavno,St Olav's University Hospital,St. Olav,,,,,,,,,,,https://ror.org/01a4hbq44,St Olav's University Hospital,,
+116,hnt.no,hntno,&explicit/1-hntno,Helse Nord-Trøndelag,Helse NT,,,,,,,,,,,,,,
+117,helse-nord.no,helse-nordno,&explicit/1-helse-nordno,Northern Norway Regional Health Authority,Helse Nord,,,,,,,,,,,https://ror.org/05f6c0c45,Northern Norway Regional Health Authority,,
+118,helgelandssykehuset.no,helgelandssykehusetno,&explicit/1-helgelandssykehusetno,Helgelandssykehuset,Helgeland,,,,,,,,,,,,,,
+119,finnmarkssykehuset.no,finnmarkssykehusetno,&explicit/1-finnmarkssykehusetno,Finnmarkssykehuset,Finnmark,,,,,,,,,,,https://ror.org/04z1ebj23,Finnmarkssykehuset,,
+120,nordlandssykehuset.no,nordlandssykehusetno,&explicit/1-nordlandssykehusetno,Nordland Hospital Trust,Nordland,,,,,,,,,,,https://ror.org/04wjd1a07,Nordland Hospital Trust,,
+121,helse-sorost.no,helse-sorostno,&explicit/1-helse-sorostno,Southern and Eastern Norway Regional Health Authority,Helse SØ,,,,,,,,,,,https://ror.org/02qx2s478,Southern and Eastern Norway Regional Health Authority,,
+122,ahus.no,ahusno,&explicit/1-ahusno,Akershus University Hospital,Ahus,,,,,,,,,,,https://ror.org/0331wat71,Akershus University Hospital,,
+123,oslo-universitetssykehus.no,oslo-universitetssykehusno,&explicit/1-oslo-universitetssykehusno,Oslo University Hospital,Oslo,,,,,,,,,,,https://ror.org/00j9c2840,Oslo University Hospital,,
+124,fjellhaug.no,fjellhaugno,&explicit/1-fjellhaugno,Fjellhaug International University College,FIUC,,,,117700,fjellhaug.no,Fjellhaug International University College,Fjellhaug Internasjonale Høgskole,Fjellhaug Internasjonale Høgskule,Fjellhaug International University College,,https://ror.org/00j9c2840,Fjellhaug International University College,FIH,
+125,vea-fs.no,vea-fsno,&explicit/1-vea-fsno,Norway’s green vocational school,Vea,,,,,,,,,,,,,,
+126,bdm.no,bdmno,&explicit/1-bdmno,Barratt Due Institute of Music,BDM,,,,,,,,,,,https://ror.org/05dqc2261,Barratt Due,,
+127,bas.org,basorg,&explicit/1-basorg,Bergen School of Architecture,BAS,,,,,,,,,,,https://ror.org/00g8zjy95,Bergen School of Architecture,,
+128,steinerhoyskolen.no,steinerhoyskolenno,&explicit/1-steinerhoyskolenno,Rudolf Steiner University College,Steiner,,,,,,,,,,,https://ror.org/00kxk0k30,Rudolf Steiner University College,,
+129,amh.no,amhno,&explicit/1-amhno,Atlantis Medisinske Høgskole,AMH,,,,,,,,,,,,,,
+130,hgut.no,hgutno,&explicit/1-hgutno,Høgskulen for grøn utvikling,HGUt,,,,,,,,,,,,,,
+131,hfdk.no,hfdkno,&explicit/1-hfdkno,Høyskolen for dansekunst,HFDK,,,,,,,,,,,,,,
+132,hlt.no,hltno,&explicit/1-hltno,Norwegian School of Leadership and Theology,HLT,,,,,,,,,,,,,,
+133,hfy.no,hfyno,&explicit/1-hfyno,University College of Vocational Education,HØFY,,,,,,,,,,,https://ror.org/04r8kt465,University College of Vocational Education,,
+134,krus.no,krusno,&explicit/1-krusno,University College of Norwegian Correctional Service,KRUS,,,,2169824,krus.no,Kriminalomsorgens høgskole og utdanningssenter KRUS,Kriminalomsorgens høgskole og utdanningssenter KRUS,,,,https://ror.org/020mpkg22,University College of Norwegian Correctional Service,,
+135,limpimusic.com,limpimusiccom,&explicit/1-limpimusiccom,Lillehammer Institute of Music Production and Industries,Limpi,,,,,,,,,,,,,,
+136,noroff.no,noroffno,&explicit/1-noroffno,Noroff School of technology and digital media,Noroff,,,,,,,,,,,,,,
+137,barnebokinstituttet.no,barnebokinstituttetno,&explicit/1-barnebokinstituttetno,The Norwegian Institute for Children’s Books,NBI,,,,,,,,,,,https://ror.org/03s2agk53,The Norwegian Institute for Children’s Books,,
+138,gestalt.no,gestaltno,&explicit/1-gestaltno,Norwegian Gestalt Institute,NGI,,,,,,,,,,,,,,
+139,nski.no,nskino,&explicit/1-nskino,NSKI University College,NSKI,,,,,,,,,,,,,,
+140,oslonh.no,oslonhno,&explicit/1-oslonhno,Oslo New University College,ONH,,,,,,,,,,,,,,
+141,skrivekunst.no,skrivekunstno,&explicit/1-skrivekunstno,Skrivekunstakademiet,Skrivekunst,,,,,,,,,,,,,,
+142,unis.no,unisno,&explicit/1-unisno,University Centre in Svalbard,UNIS,,,,61,unis.no,The University Center in Svalbard,Universitetssenteret på Svalbard,,The University Center in Svalbard,,https://ror.org/03cyjf656,University Centre in Svalbard,UNIS,
+143,rise-pfi.no,rise-pfino,&explicit/1-rise-pfino,RISE PFI,RISE PFI,,,,,,,,,,,,,,
+144,aquateam.no,aquateamno,&explicit/1-aquateamno,Aquateam COWI,Aquateam,,,,,,,,,,,,,,
+145,dsa.no,dsano,&explicit/1-dsano,Norwegian Radiation and Nuclear Safety Authority,DSA,,,,,,,,,,,https://ror.org/039kcn609,Norwegian Radiation and Nuclear Safety Authority,DSA,
+146,arkivverket.no,arkivverketno,&explicit/1-arkivverketno,National Archives of Norway,Arkivverket,,,,,,,,,,,,,,
+147,niom.no,niomno,&explicit/1-niomno,Nordic Institute of Dental Materials,NIOM,,,,,,,,,,,https://ror.org/015xbps36,Nordic Institute of Dental Materials,,
+148,norges-bank.no,norges-bankno,&explicit/1-norges-bankno,Central Bank of Norway,NB,,,,,,,,,,,https://ror.org/01v408m73,Central Bank of Norway,,
+149,nve.no,nveno,&explicit/1-nveno,Norwegian Water Resources and Energy Directorate,NVE,,,,,,,,,,,https://ror.org/02syy7986,Norwegian Water Resources and Energy Directorate,,
+150,norner.no,nornerno,&explicit/1-nornerno,Norner,Norner,,,,,,,,,,,https://ror.org/05ew68y43,Norner,,
+151,norskfolkemuseum.no,norskfolkemuseumno,&explicit/1-norskfolkemuseumno,Norsk Folkemuseum,Norsk Folkemuseum,,,,,,,,,,,https://ror.org/02t6kpd72,Norsk Folkemuseum,,
+152,kartverket.no,kartverketno,&explicit/1-kartverketno,Norwegian Mapping Authority,NMA,,,,,,,,,,,https://ror.org/05dz27378,Norwegian Mapping Authority,,
+153,ssb.no,ssbno,&explicit/1-ssbno,Statistics Norway,SSB,,,,,,,,,,,https://ror.org/02e50va28,Statistics Norway,,
+154,arkivet.no,arkivetno,&explicit/1-arkivetno,ARKIVET Peace and Human Rights Centre,ARKIVET,,,,,,,,,,,,,,
+155,tekniskmuseum.no,tekniskmuseumno,&explicit/1-tekniskmuseumno,Norwegian Museum of Science and Technology,,,,,,,,,,,,https://ror.org/00zave958,Norwegian Museum of Science and Technology,,
From c4862789c5c4055ece305e26f09727afe1a86844 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 23 Mar 2023 10:12:10 +0000
Subject: [PATCH 262/354] removed non-functional "null" entries from geodata
---
.../migration/_dvno_geolocation_cleaning_v5_13.sql | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
index 9eb8d5b..7399e71 100644
--- a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
@@ -382,10 +382,10 @@ update datasetfieldvalue set value='19.216667' where id=21590;
update datasetfieldvalue set value='69.583333' where id=21606;
update datasetfieldvalue set value='19.216667' where id=21610;
update datasetfieldvalue set value='69.583333' where id=21611;
-update datasetfieldvalue set value=null where id=21626;
-update datasetfieldvalue set value=null where id=21636;
-update datasetfieldvalue set value=null where id=21644;
-update datasetfieldvalue set value=null where id=21651;
+update datasetfieldvalue set value=69.0 where id=21626;
+update datasetfieldvalue set value=70.90 where id=21636;
+update datasetfieldvalue set value=31.05 where id=21644;
+update datasetfieldvalue set value=28.17 where id=21651;
update datasetfieldvalue set value='69.583333' where id=21954;
update datasetfieldvalue set value='69.583333' where id=21962;
update datasetfieldvalue set value='19.216667' where id=21969;
From a554097e6f79d71b5de1ec53f9dc316c23591d6a Mon Sep 17 00:00:00 2001
From: Louis-wr
Date: Tue, 28 Mar 2023 08:47:29 +0200
Subject: [PATCH 263/354] added missing broken datasets
---
.../_dvno_geolocation_cleaning_v5_13.sql | 20 +++++++++++++++++++
1 file changed, 20 insertions(+)
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
index 7399e71..ede4eac 100644
--- a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql
@@ -3271,3 +3271,23 @@ update datasetfieldvalue set value='23.916667' where id=134095;
update datasetfieldvalue set value='31' where id=134106;
update datasetfieldvalue set value='69.75' where id=134118;
update datasetfieldvalue set value='77' where id=134120;
+update datasetfieldvalue set value='-82.0' where id=37242;
+update datasetfieldvalue set value='-79.0' where id=37234;
+update datasetfieldvalue set value='-2.0' where id=37232;
+update datasetfieldvalue set value='-4.0' where id=37252;
+update datasetfieldvalue set value='-82.0' where id=33052;
+update datasetfieldvalue set value='-79.0' where id=33054;
+update datasetfieldvalue set value='-2.0' where id=33055;
+update datasetfieldvalue set value='-4.0' where id=33053;
+update datasetfieldvalue set value='9.333333' where id=23944;
+update datasetfieldvalue set value='9.666667' where id=23936;
+update datasetfieldvalue set value='78.666667' where id=23900;
+update datasetfieldvalue set value='78.5' where id=23940;
+update datasetfieldvalue set value='9.333333' where id=22494;
+update datasetfieldvalue set value='9.666667' where id=22496;
+update datasetfieldvalue set value='78.666667' where id=22495;
+update datasetfieldvalue set value='78.5' where id=22497;
+update datasetfieldvalue set value='-27' where id=135009;
+update datasetfieldvalue set value='-26' where id=135023;
+update datasetfieldvalue set value='37' where id=135021;
+update datasetfieldvalue set value='35' where id=135012;
From ce875fcc8dec0c2d449039b5f778cf0d70d37162 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 28 Mar 2023 09:43:21 +0000
Subject: [PATCH 264/354] fixed file name in S3
---
distros/dataverse.no/init.d/cronjob/backupData.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/backupData.sh b/distros/dataverse.no/init.d/cronjob/backupData.sh
index 0a035f3..502c9e9 100755
--- a/distros/dataverse.no/init.d/cronjob/backupData.sh
+++ b/distros/dataverse.no/init.d/cronjob/backupData.sh
@@ -5,7 +5,7 @@ cp -r /secrets/aws-cli/.aws ~
#file=10.21337/OZ4JBV/1869225dfbd-4edecc03da9e
-files=`psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/backupfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2]}'`
+files=`psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/backupfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2]}' | sed "s/S3:\/\/$aws_bucket_name://"`
for file in $files
do
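The sed step added above strips any remaining S3://<bucket>: prefix from the path handed to the copy loop, so the value matches the sample path kept in the script's comment. A quick illustrative check of that transformation (the bucket name and DOI path below are placeholders, not production values):

    aws_bucket_name=2002-green-dataversenotest1
    echo "10.21337/OZ4JBV/S3://$aws_bucket_name:1869225dfbd-4edecc03da9e" | sed "s/S3:\/\/$aws_bucket_name://"
    # prints: 10.21337/OZ4JBV/1869225dfbd-4edecc03da9e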
From 0496192dfbd73b8af0b206fe2c7c6c41b2b62e30 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 4 Apr 2023 13:57:55 +0000
Subject: [PATCH 265/354] fixed automatic restart
---
restart-dataverse.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/restart-dataverse.sh b/restart-dataverse.sh
index 385b4d2..f13201f 100644
--- a/restart-dataverse.sh
+++ b/restart-dataverse.sh
@@ -7,7 +7,7 @@ rm $healthcheck
DATAVERSE=$1 #'https://test-docker.dataverse.no'
echo $DATAVERSE
-curl ${DATAVERSE}/api/dataverses/root|grep "description" >> $healthcheck
+curl -s ${DATAVERSE}/api/dataverses/root|grep "name" >> $healthcheck
DELAY=15
if [ -s $healthcheck ];
From 76e6e9d40612db52678b6de53c8c802c76a0d130 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 5 Apr 2023 09:41:39 +0000
Subject: [PATCH 266/354] fixed restart feature
---
restart-dataverse.sh | 22 ++++++++++++++++++++--
1 file changed, 20 insertions(+), 2 deletions(-)
diff --git a/restart-dataverse.sh b/restart-dataverse.sh
index f13201f..ce94f14 100644
--- a/restart-dataverse.sh
+++ b/restart-dataverse.sh
@@ -1,14 +1,29 @@
#!/bin/bash
+if [ $# -eq 0 ]
+ then
+    echo "Usage: restart-dataverse.sh [dataverse address] [distrib location (optional)]"
+ exit 1
+ fi
# Check if Dataverse is online
healthcheck="/tmp/healthcheck.log"
restartpid='/tmp/restart.pid'
rm $healthcheck
DATAVERSE=$1 #'https://test-docker.dataverse.no'
+DISTRIB=${2:-'/distrib'}
+DELAY=15
echo $DATAVERSE
curl -s ${DATAVERSE}/api/dataverses/root|grep "name" >> $healthcheck
-DELAY=15
+
+cat $healthcheck
+
+if test "`find $restartpid -mmin +15 -print 2>/dev/null`"
+ then
+    echo "${restartpid} is too old, deleting it; this may lead to another restart"
+ rm $restartpid
+ fi
+
if [ -s $healthcheck ];
then
@@ -22,9 +37,12 @@ else
else
echo 'restarting...' > $restartpid
date >> /mntblob/logs/restart.log
- cd /distrib/dataverse-docker
+ cd ${DISTRIB}/dataverse-docker
+ echo "down"
/usr/local/bin/docker-compose down
+ echo "waiting ${DELAY}s for ${DATAVERSE} to go down"
sleep $DELAY
+ echo "up"
/usr/local/bin/docker-compose up -d
fi
fi
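With the usage guard and the optional second argument above, the script can be run by hand as well as from cron; a sketch of both invocations (the second distrib path is only an example location):

    /bin/bash /distrib/dataverse-docker/restart-dataverse.sh https://dataverse.no
    /bin/bash restart-dataverse.sh https://test-docker.dataverse.no /alternative/distrib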
From ab97ef02582de03ef005bbd4dbc14d6cacf12eb3 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 13 Apr 2023 11:26:22 +0000
Subject: [PATCH 267/354] updated min-part-size
---
distros/dataverse.no/init.d/006-s3-aws-storage.sh | 2 +-
distros/dataverse.no/init.d/022-splitpath.sh | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/006-s3-aws-storage.sh b/distros/dataverse.no/init.d/006-s3-aws-storage.sh
index 474c4dd..2ac349f 100755
--- a/distros/dataverse.no/init.d/006-s3-aws-storage.sh
+++ b/distros/dataverse.no/init.d/006-s3-aws-storage.sh
@@ -8,7 +8,7 @@ if [ "${aws_bucket_name}" ]; then
asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.bucket-name\=${aws_bucket_name}"
asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.download-redirect\=true"
asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.upload-redirect=true"
- # asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.min-part-size=53687091200"
+ asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.min-part-size=536870912"
asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.url-expiration-minutes\=120"
asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.connection-pool-size\=4096"
asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.storage-driver-id\=S3"
diff --git a/distros/dataverse.no/init.d/022-splitpath.sh b/distros/dataverse.no/init.d/022-splitpath.sh
index 740bef3..764420d 100755
--- a/distros/dataverse.no/init.d/022-splitpath.sh
+++ b/distros/dataverse.no/init.d/022-splitpath.sh
@@ -1,3 +1,3 @@
#!/bin/bash
- asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.min-part-size=6553600"
+ #asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.min-part-size=6553600"
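The new value 536870912 bytes is 512 MiB (the previously commented-out value, 53687091200 bytes, was 50 GiB), i.e. the minimum part size used for S3 multipart direct uploads. Assuming the same ADMIN_USER and PASSWORD_FILE environment the init scripts use, the active option can be checked on a running Payara with:

    asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} list-jvm-options | grep min-part-size
    # expected to include: -Ddataverse.files.S3.min-part-size=536870912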
From ec92b537620c3bf8a1f477d590b1d810f33532ee Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 19 Apr 2023 08:25:22 +0000
Subject: [PATCH 268/354] removed curation label notifications
---
distros/dataverse.no/init.d/024-curation-lables.sh | 1 +
1 file changed, 1 insertion(+)
diff --git a/distros/dataverse.no/init.d/024-curation-lables.sh b/distros/dataverse.no/init.d/024-curation-lables.sh
index 0fa922c..f5f0703 100755
--- a/distros/dataverse.no/init.d/024-curation-lables.sh
+++ b/distros/dataverse.no/init.d/024-curation-lables.sh
@@ -1,2 +1,3 @@
#!/bin/bash
curl -X PUT -d '{"Standard Process":["Curator Assigned", "In Curation", "Awaiting Reply", "Legal or Ethical Concerns", "Awaiting Final Approval", "In Double Blind Review", "Awaiting Article Publication", "Candidate for Deletion"], "Alternate Process":["State 1","State 2","State 3"]}' http://localhost:8080/api/admin/settings/:AllowedCurationLabels
+curl -X PUT -d 'STATUSUPDATED' http://localhost:8080/api/admin/settings/:AlwaysMuted
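Putting STATUSUPDATED into :AlwaysMuted mutes the notification type tied to curation status changes (per the subject of this patch). Should those notifications be needed again, the setting can presumably be cleared through the same admin settings endpoint:

    curl -X DELETE http://localhost:8080/api/admin/settings/:AlwaysMuted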
From d623ae717be464f385a48b4f78949af57b07a5ce Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 28 Apr 2023 12:55:04 +0000
Subject: [PATCH 269/354] fixed database backup to s3
---
distros/dataverse.no/init.d/cronjob/backupData.sh | 3 +++
distros/dataverse.no/init.d/cronjob/dumpdatabase.sh | 2 +-
distros/dataverse.no/runOnce/crontab.sh | 3 +++
3 files changed, 7 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/backupData.sh b/distros/dataverse.no/init.d/cronjob/backupData.sh
index 502c9e9..90b4aca 100755
--- a/distros/dataverse.no/init.d/cronjob/backupData.sh
+++ b/distros/dataverse.no/init.d/cronjob/backupData.sh
@@ -6,6 +6,9 @@ cp -r /secrets/aws-cli/.aws ~
#file=10.21337/OZ4JBV/1869225dfbd-4edecc03da9e
files=`psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/backupfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2]}' | sed "s/S3:\/\/$aws_bucket_name://"`
+dump=`ls /data/databaseDumps/ -Art | tail -n 1`
+
+aws s3 --endpoint https://$aws_endpoint cp /data/databaseDumps/$dump s3://$aws_bucket_name/databaseDumps
for file in $files
do
diff --git a/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
index f35db71..6040505 100755
--- a/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
+++ b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
@@ -1,6 +1,6 @@
#!/bin/bash
docker exec --user postgres postgres pg_dump -U dataverse dataverse > /mnt/dataverse.dump
gzip -f /mnt/dataverse.dump
-cp /mnt/dataverse.dump.gz /mntblob/databaseDumps/dataverse.dump.gz`date +%Y%m%d`
+cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/dataverse.`date +%Y%m%d`.dump.gz
rsync -arvP --rsh=ssh /mnt/dataverse.dump.gz DVmtr-cmp01:/tmp/dataverse.dump.gz
diff --git a/distros/dataverse.no/runOnce/crontab.sh b/distros/dataverse.no/runOnce/crontab.sh
index 33f8355..731f843 100644
--- a/distros/dataverse.no/runOnce/crontab.sh
+++ b/distros/dataverse.no/runOnce/crontab.sh
@@ -6,3 +6,6 @@ cp -r /distib/private/.ssh /var/opt/microsoft/omsagent/run
chown -R omsagent /var/opt/microsoft/omsagent/run/.ssh
#0 0 * * * /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/backupData.sh" >> /var/log/bakupslogs.log
#0 0 * * * /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/backupData.sh" >> /var/log/bakupslogs.log
+#1 0 * * * /distrib/dataverse-docker/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
+#*/3 * * * * /bin/bash /distrib/dataverse-docker/restart-dataverse.sh https://dataverse.no
+
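After a nightly run, the uploaded dump can be verified from inside the container with the same endpoint and bucket variables the script relies on (a verification step, not part of the backup job itself):

    cp -r /secrets/aws-cli/.aws ~
    aws s3 --endpoint https://$aws_endpoint ls s3://$aws_bucket_name/databaseDumps/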
From 2e4effe070474014d1b7680843698e1e0a340bb8 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 11 May 2023 09:35:33 +0000
Subject: [PATCH 270/354] updated postgres
---
distros/dataverse.no/docker-compose.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 87452db..489d30a 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -37,7 +37,7 @@ services:
container_name: postgres
ports:
- "5433:5432"
- image: postgres:10.13
+ image: postgres:15.2
restart: unless-stopped
environment:
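Note that 10.13 to 15.2 spans several PostgreSQL major versions, so the old data directory cannot simply be reused by the new image. One possible sequence, assuming the container name and dump script from this repository, is to dump on the old image and restore into a fresh volume on the new one:

    /distrib/dataverse-docker/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh   # dump while still on 10.13
    docker-compose stop postgres                                                    # switch the image and data volume here
    gunzip -c /mnt/dataverse.dump.gz | docker exec -i --user postgres postgres psql -U dataverse dataverse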
From 8c8cf93407a45437e299ae83617fd5a9989a7d49 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 24 May 2023 14:09:38 +0000
Subject: [PATCH 271/354] added maintenance banner
---
.../init.d/cronjob/maintenance_notification.json | 10 ++++++++++
.../init.d/cronjob/maintenance_notification_on.sh | 2 ++
2 files changed, 12 insertions(+)
create mode 100644 distros/dataverse.no/init.d/cronjob/maintenance_notification.json
create mode 100755 distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh
diff --git a/distros/dataverse.no/init.d/cronjob/maintenance_notification.json b/distros/dataverse.no/init.d/cronjob/maintenance_notification.json
new file mode 100644
index 0000000..8ad88ce
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/maintenance_notification.json
@@ -0,0 +1,10 @@
+{
+ "dismissibleByUser": "false",
+ "messageTexts": [
+ {
+ "lang": "en",
+      "message": "Please note that at 15:30 CEST today, DataverseNO will not be available for approx. 5 minutes due to regular maintenance. Please avoid uploading data or editing your dataset in other ways during this short period. Thank you! For questions, please contact the DataverseNO Team at researchdata@hjelp.uit.no"
+ }
+
+ ]
+}
diff --git a/distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh b/distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh
new file mode 100755
index 0000000..c6b0cc7
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh
@@ -0,0 +1,2 @@
+#! /bin/bash
+curl -H "Content-type:application/json" -X POST http://localhost:8080/api/admin/bannerMessage --upload-file ${INIT_SCRIPTS_FOLDER}/cronjob/maintenance_notification.json
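Once posted, the active banner messages (and their ids, needed for later deletion) can be listed via the same admin endpoint, for example:

    curl -s -X GET http://localhost:8080/api/admin/bannerMessage | jq '.data'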
From ccbfd4dd6912c7ea631408d8020d46cd884fe4b2 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 31 May 2023 13:03:32 +0000
Subject: [PATCH 272/354] updated cronjob
---
.../dataverse.no/init.d/cronjob/maintenance_notification_on.sh | 2 ++
1 file changed, 2 insertions(+)
diff --git a/distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh b/distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh
index c6b0cc7..f745558 100755
--- a/distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh
+++ b/distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh
@@ -1,2 +1,4 @@
#! /bin/bash
curl -H "Content-type:application/json" -X POST http://localhost:8080/api/admin/bannerMessage --upload-file ${INIT_SCRIPTS_FOLDER}/cronjob/maintenance_notification.json
+
+# 0 16 * * 3 /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/maintenance_notification_on.sh"
From b551cecbf3272c1534be07bc7d941e8fcafb7841 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 31 May 2023 13:08:06 +0000
Subject: [PATCH 273/354] new update message
---
.../dataverse.no/init.d/cronjob/maintenance_notification.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/maintenance_notification.json b/distros/dataverse.no/init.d/cronjob/maintenance_notification.json
index 8ad88ce..e19174f 100644
--- a/distros/dataverse.no/init.d/cronjob/maintenance_notification.json
+++ b/distros/dataverse.no/init.d/cronjob/maintenance_notification.json
@@ -3,7 +3,7 @@
"messageTexts": [
{
"lang": "en",
-      "message": "Please note that at 15:30 CEST today, DataverseNO will not be available for approx. 5 minutes due to regular maintenance. Please avoid uploading data or editing your dataset in other ways during this short period. Thank you! For questions, please contact the DataverseNO Team at researchdata@hjelp.uit.no"
+      "message": "Please note that at 08:00 CE(S)T every Thursday morning, DataverseNO will not be available for approx. 5 minutes due to regular maintenance. Please avoid uploading data or editing your dataset in other ways during this short period. Thank you! For questions, please contact the DataverseNO Team at researchdata@hjelp.uit.no."
}
]
From 26c960eaa6684d828d2c56635ba13040bdc19ed6 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 20 Jun 2023 09:05:58 +0000
Subject: [PATCH 274/354] added update script
---
update.sh | 5 +++++
1 file changed, 5 insertions(+)
create mode 100755 update.sh
diff --git a/update.sh b/update.sh
new file mode 100755
index 0000000..2a82c78
--- /dev/null
+++ b/update.sh
@@ -0,0 +1,5 @@
+#! /bin/bash
+apt-get update -y && apt-get dist-upgrade -y && apt-get autoremove -y && apt-get clean -y && apt-get autoclean -y
+[ -e /var/run/reboot-required ] && reboot
+
+# 0 04 * * 4 /distrib/dataverse-docker/update.sh
From 1e22137652a3c6dd32b89d5c6943cae8b3ae9c15 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 21 Jun 2023 13:13:07 +0000
Subject: [PATCH 275/354] updated interval to remove potential edge case
---
distros/dataverse.no/init.d/cronjob/backupfiles.sql | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/backupfiles.sql b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
index 65151bc..aa963a8 100644
--- a/distros/dataverse.no/init.d/cronjob/backupfiles.sql
+++ b/distros/dataverse.no/init.d/cronjob/backupfiles.sql
@@ -4,7 +4,7 @@
--select storageidentifier, CONCAT(authority, '/',split_part(identifier, '/', 1) , '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '1 day');
-select storageidentifier, CONCAT( get_authority(id), '/', get_identifier(id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')) from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '1 day');
+select storageidentifier, CONCAT( get_authority(id), '/', get_identifier(id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')) from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '2 day');
From 963ea86e99221b3677e8aa22924af77a2c122efb Mon Sep 17 00:00:00 2001
From: Obi <34234629+oodu@users.noreply.github.com>
Date: Tue, 27 Jun 2023 14:09:17 +0200
Subject: [PATCH 276/354] Update custom-footer.html
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Added Tilgjengelegheitserklæring (Accessibility Statement)
---
distros/dataverse.no/modification/custom-footer.html | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/modification/custom-footer.html b/distros/dataverse.no/modification/custom-footer.html
index ef74808..f033137 100644
--- a/distros/dataverse.no/modification/custom-footer.html
+++ b/distros/dataverse.no/modification/custom-footer.html
@@ -38,7 +38,7 @@
in cooperation with its partner institutions
- About DataverseNO | Contact
+ About DataverseNO | Contact | Tilgjengelegheitserklæring (Accessibility Statement)
From 99f76377c59451eec5fde312c31c6419792dc66f Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 28 Jun 2023 10:34:16 +0000
Subject: [PATCH 277/354] updated traefik and postgres
---
distros/dataverse.no/docker-compose.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 489d30a..5282e8e 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -4,7 +4,7 @@ version: '3.7'
services:
reverse-proxy:
# The official v2 Traefik docker image
- image: traefik:v2.2
+ image: traefik:v2.10.3
# Enables the web UI and tells Traefik to listen to docker
container_name: traefik
command:
@@ -37,7 +37,7 @@ services:
container_name: postgres
ports:
- "5433:5432"
- image: postgres:15.2
+ image: postgres:15.3
restart: unless-stopped
environment:
From e098497042d278fa3a2aa9fe9568f74fce887c3b Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 4 Jul 2023 11:30:41 +0000
Subject: [PATCH 278/354] update shibboleth image and whoami
---
distros/dataverse.no/docker-compose.yaml | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 5282e8e..ba0a931 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -57,8 +57,8 @@ services:
shibboleth:
networks:
- traefik
- #image: test02/shibboleth:latest
- image: ${DOCKER_HUB}/shibboleth:3.3.0.A2.37
+ #image: test03/shibboleth:3.3.0.B
+ image: ${DOCKER_HUB}/shibboleth:3.4.1
container_name: shibboleth
privileged: true
ports:
@@ -83,6 +83,7 @@ services:
solr:
networks:
- traefik
+ #image: solr:8.11.1
image: ${DOCKER_HUB}/solr:8.9.0
container_name: solr
privileged: true
@@ -107,7 +108,7 @@ services:
whoami:
networks:
- traefik
- image: "containous/whoami"
+ image: "traefik/whoami"
container_name: "whoami"
labels:
- "traefik.enable=true"
From f68fbe5b7d7b32924f5ef638744b694e2912259f Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 5 Jul 2023 09:49:18 +0000
Subject: [PATCH 279/354] automatic turn-off of the maintenance banner message
---
.../init.d/cronjob/maintenance_notification_off.sh | 5 +++++
1 file changed, 5 insertions(+)
create mode 100755 distros/dataverse.no/init.d/cronjob/maintenance_notification_off.sh
diff --git a/distros/dataverse.no/init.d/cronjob/maintenance_notification_off.sh b/distros/dataverse.no/init.d/cronjob/maintenance_notification_off.sh
new file mode 100755
index 0000000..dadfc3d
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/maintenance_notification_off.sh
@@ -0,0 +1,5 @@
+#! /bin/bash
+
+curl -X DELETE http://localhost:8080/api/admin/bannerMessage/$(curl -s -X GET http://localhost:8080/api/admin/bannerMessage | jq -r 'first(.data[]|select(.displayValue | contains("due to regular maintenance")).id)')
+
+# 30 06 * * 4 /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/maintenance_notification_off.sh"
From 5214ca061fa533c60705408c94130d45797b5ce6 Mon Sep 17 00:00:00 2001
From: Philipp Conzett
Date: Tue, 18 Jul 2023 15:19:04 +0200
Subject: [PATCH 280/354] Update affiliations.csv
---
distros/dataverse.no/init.d/affiliations/affiliations.csv | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/affiliations/affiliations.csv b/distros/dataverse.no/init.d/affiliations/affiliations.csv
index 516ef55..639f464 100644
--- a/distros/dataverse.no/init.d/affiliations/affiliations.csv
+++ b/distros/dataverse.no/init.d/affiliations/affiliations.csv
@@ -16,7 +16,7 @@ id,dvno_group_name,dvno_group_id,dvno_group_id_explicit,dvno_affiliation,dvno_ab
15,uib.no,uibno,&explicit/1-uibno,University of Bergen,UiB,uib,2002-red-dvno,cloudian-dvno,137,uib.no,University of Bergen,Universitetet i Bergen,Universitetet i Bergen,University of Bergen,Bergen Universitehta,https://ror.org/03zga2b32,University of Bergen,,
16,uio.no,uiono,&explicit/1-uiono,University of Oslo,UiO,uio,2002-red-dvno,cloudian-dvno,138,uio.no,University of Oslo,Universitetet i Oslo,Universitetet i Oslo,University of Oslo,Oslo Universitehta,https://ror.org/01xtthb56,University of Oslo,UiO,
17,uit.no,uitno,&explicit/1-uitno,UiT The Arctic University of Norway,UiT,uit,2002-red-dvno,cloudian-dvno,139,uit.no,UiT The Arctic University of Norway,UiT Norges arktiske universitet,UiT Noregs arktiske universitet,UiT The Arctic University of Norway,UiT Norgga árktalaš universitehta,https://ror.org/00wge5k78,UiT The Arctic University of Norway,UiT,
-18,ntnu.no,ntnuno,&explicit/1-ntnuno,Norwegian University of Science and Technology,NTNU,ntnu,2002-red-dvno,cloudian-dvno,140,ntnu.no,NTNU,NTNU,,NTNU,,https://ror.org/05xg72x27,Norwegian University of Science and Technology,NTNU,
+18,ntnu.no,ntnuno,&explicit/1-ntnuno,NTNU – Norwegian University of Science and Technology,NTNU,ntnu,2002-red-dvno,cloudian-dvno,140,ntnu.no,NTNU,NTNU,,NTNU,,https://ror.org/05xg72x27,Norwegian University of Science and Technology,NTNU,
19,nina.no,ninano,&explicit/1-ninano,Norwegian Institute for Nature Research,NINA,,,,144,nina.no,NINA,NINA,NINA,,,https://ror.org/04aha0598,Norwegian Institute for Nature Research,NINA,
20,ngu.no,nguno,&explicit/1-nguno,Geological Survey of Norway,NGU,,,,145,ngu.no,Geological Survey of Norway,Norges geologiske undersøkelse,Norges geologiske undersøkelse,Geological Survey of Norway,,,,,
21,himolde.no,himoldeno,&explicit/1-himoldeno,Molde University College,HiM,,,,158,himolde.no,Molde University College,Høgskolen i Molde,Høgskulen i Molde,Molde University College,Molde Allaskuvla,https://ror.org/00kxjcd28,Molde University College,HiM,
@@ -34,7 +34,7 @@ id,dvno_group_name,dvno_group_id,dvno_group_id_explicit,dvno_affiliation,dvno_ab
33,fhs.mil.no,fhsmilno,&explicit/1-fhsmilno,Norwegian Defence University College,NDUC,,,,115267,mil.no,Norwegian Defence University College,Forsvarets høgskoler,Forsvarets høgskuler,Norwegian Defence University College,,https://ror.org/02vfz9j23,Norwegian Defence University College,NDUC,
34,ansgarskolen.no,ansgarskolenno,&explicit/1-ansgarskolenno,Ansgar University College,Ansgar,,,,120177,ansgarhogskole.no,Ansgar University College,Ansgar høyskole,,Ansgar University College,,https://ror.org/05y8hw592,Ansgar Bibelskole,,
35,oslomet.no,oslometno,&explicit/1-oslometno,OsloMet – Oslo Metropolitan University,OsloMet,,,,120186,oslomet.no,OsloMet – Oslo Metropolitan University,OsloMet – storbyuniversitetet,OsloMet – storbyuniversitetet,OsloMet – Oslo Metropolitan University,OsloMet – stuorragávpotuniversitehta,https://ror.org/04q12yn84,OsloMet – Oslo Metropolitan University,HiOA,
-36,nmbu.no,nmbuno,&explicit/1-nmbuno,Norwegian University of Life Sciences,NMBU,nmbu,2002-red-dvno,cloudian-dvno,1777926,nmbu.no,Norwegian University of Life Sciences,Norges miljø- og biovitenskapelige universitet,Noregs miljø- og biovitenskapelige universitet,Norwegian University of Life Sciences,,https://ror.org/04a1mvv97,Norwegian University of Life Sciences,NMBU,
+36,nmbu.no,nmbuno,&explicit/1-nmbuno,Norwegian University of Life Sciences (NMBU),NMBU,nmbu,2002-red-dvno,cloudian-dvno,1777926,nmbu.no,Norwegian University of Life Sciences,Norges miljø- og biovitenskapelige universitet,Noregs miljø- og biovitenskapelige universitet,Norwegian University of Life Sciences,,https://ror.org/04a1mvv97,Norwegian University of Life Sciences,NMBU,
37,nibio.no,nibiono,&explicit/1-nibiono,Norwegian Institute of Bioeconomy Research,NIBIO,,,,2052113,nibio.no,Nibio,Nibio,,,,https://ror.org/04aah1z61,Norwegian Institute of Bioeconomy Research,NIBIO,
38,vid.no,vidno,&explicit/1-vidno,VID Specialized University,VID,vid,2002-red-dvno,cloudian-dvno,2064538,vid.no,VID Specialized University,VID vitenskapelige høgskole,VID vitenskapelige høgskule,VID Specialized University,,https://ror.org/0191b3351,VID Specialized University,VID,
39,nord.no,nordno,&explicit/1-nordno,Nord University,NORD,nord,2002-red-dvno,cloudian-dvno,2066644,nord.no,Nord University,Nord universitet,Nord universitet,Nord University,,https://ror.org/030mwrt98,Nord University,,
From 2ddab041fc65afab4d48a39f5c871351016f851e Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 24 Aug 2023 09:46:25 +0200
Subject: [PATCH 281/354] Update maintenance.rst
fixed typo
---
doc/maintenance.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/doc/maintenance.rst b/doc/maintenance.rst
index 31c92be..9237217 100644
--- a/doc/maintenance.rst
+++ b/doc/maintenance.rst
@@ -85,7 +85,7 @@ if you are using a dockerized version : ``docker exec -it postgres /bin/sh``
su postgres
psql -U dataverse dataverse
- sql update authenticateduserlookup set persistentuserid=regexp_replace(persistentuserid, 'idp\.', 'idp-test.');
+ update authenticateduserlookup set persistentuserid=regexp_replace(persistentuserid, 'idp\.', 'idp-test.');
Setting up an S3 bucket
-----------------------
From e9a647cd6c1c18f12d2d6e93a1ec37f97cbfc74a Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 29 Aug 2023 11:35:39 +0000
Subject: [PATCH 282/354] changed email to support@dataverse.no
---
.../dataverse.no/init.d/cronjob/maintenance_notification.json | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/maintenance_notification.json b/distros/dataverse.no/init.d/cronjob/maintenance_notification.json
index e19174f..d5067d1 100644
--- a/distros/dataverse.no/init.d/cronjob/maintenance_notification.json
+++ b/distros/dataverse.no/init.d/cronjob/maintenance_notification.json
@@ -3,7 +3,7 @@
"messageTexts": [
{
"lang": "en",
-      "message": "Please note that at 08:00 CE(S)T every Thursday morning, DataverseNO will not be available for approx. 5 minutes due to regular maintenance. Please avoid uploading data or editing your dataset in other ways during this short period. Thank you! For questions, please contact the DataverseNO Team at researchdata@hjelp.uit.no."
+      "message": "Please note that at 08:00 CE(S)T every Thursday morning, DataverseNO will not be available for approx. 5 minutes due to regular maintenance. Please avoid uploading data or editing your dataset in other ways during this short period. Thank you! For questions, please contact the DataverseNO Team at support@dataverse.no."
}
]
From a3db67bda598d40503d7ff0fc9c0ca5656142332 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 14 Sep 2023 07:37:28 +0000
Subject: [PATCH 283/354] updated email
---
distros/dataverse.no/modification/Bundle.properties.patch | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/modification/Bundle.properties.patch b/distros/dataverse.no/modification/Bundle.properties.patch
index 7dc1883..6dadd3e 100644
--- a/distros/dataverse.no/modification/Bundle.properties.patch
+++ b/distros/dataverse.no/modification/Bundle.properties.patch
@@ -7,7 +7,7 @@
-login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
-login.institution.support.blurbwithLink=Leaving your institution? Please contact {0} for assistance.
+login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
-+login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance.
++login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance.
login.builtin.credential.usernameOrEmail=Username/Email
login.builtin.credential.password=Password
login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account?
From 45b29d8e7d237ca40a88708a88cb019857ffe49b Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 21 Sep 2023 09:12:51 +0000
Subject: [PATCH 284/354] maintenance message
---
.../init.d/cronjob/onetime_maintenace.json | 10 ++++++++++
1 file changed, 10 insertions(+)
create mode 100644 distros/dataverse.no/init.d/cronjob/onetime_maintenace.json
diff --git a/distros/dataverse.no/init.d/cronjob/onetime_maintenace.json b/distros/dataverse.no/init.d/cronjob/onetime_maintenace.json
new file mode 100644
index 0000000..1fe7cce
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/onetime_maintenace.json
@@ -0,0 +1,10 @@
+{
+ "dismissibleByUser": "false",
+ "messageTexts": [
+ {
+ "lang": "en",
+      "message": "Due to maintenance, DataverseNO will be unavailable from Thursday September 21 at 16:00 CEST until Friday September 22 at 08:00 CEST. Apologies for the short notice and for the inconvenience. If you have any questions or comments, please contact the DataverseNO team at support@dataverse.no."
+ }
+
+ ]
+}
From c5f528594ca30e5ee41b9cae5e5e1e9d4cbb2feb Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 4 Oct 2023 15:41:32 +0000
Subject: [PATCH 285/354] updated ssl protocol
---
distros/dataverse.no/configs/http-ssl.conf | 15 +++++++++++++--
1 file changed, 13 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/configs/http-ssl.conf b/distros/dataverse.no/configs/http-ssl.conf
index b7e7371..670c0aa 100755
--- a/distros/dataverse.no/configs/http-ssl.conf
+++ b/distros/dataverse.no/configs/http-ssl.conf
@@ -78,6 +78,8 @@ SSLCryptoDevice builtin
+
+
# General setup for the virtual host, inherited from global configuration
#DocumentRoot "/var/www/html"
#ServerName www.example.com:443
@@ -114,12 +116,16 @@ SSLEngine on
# SSL Protocol support:
# List the enable protocol levels with which clients will be able to
# connect. Disable SSLv2 access by default:
-SSLProtocol all -SSLv2 -SSLv3
+SSLProtocol +all +TLSv1.3 +TLSv1.2 -SSLv2 -SSLv3 -TLSv1 -TLSv1.1
+SSLCipherSuite ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256
+SSLHonorCipherOrder on
+SSLCompression off
+SSLSessionTickets off
# SSL Cipher Suite:
# List the ciphers that the client is permitted to negotiate.
# See the mod_ssl documentation for a complete list.
-SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA
+#SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA
# Speed-optimized SSL Cipher configuration:
# If speed is your main concern (on busy HTTPS servers e.g.),
@@ -345,5 +351,10 @@ Customlog /var/log/httpd/access.log combined
ErrorLog /var/log/httpd/error.log
ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i"
+
+#RewriteEngine On
+#RewriteRule .* https://site.uit.no/dataverseno/nn/driftsmelding/ [R=302,L]
+
+
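The effect of the tightened SSLProtocol and SSLCipherSuite lines can be spot-checked from any client with openssl (flag availability depends on the local openssl build): TLS 1.1 should now be refused while TLS 1.2 and 1.3 still negotiate.

    openssl s_client -connect dataverse.no:443 -tls1_1 < /dev/null      # expected to fail after this change
    openssl s_client -connect dataverse.no:443 -tls1_2 < /dev/null | grep -E 'Protocol|Cipher'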
From 76522a1f58e2ba78fd687cb5d2de086d55cc9bdd Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 10 Oct 2023 10:03:19 +0200
Subject: [PATCH 286/354] added robots.txt
---
distros/dataverse.no/configs/robots.txt | 26 +++++++++++++++++++++++++
1 file changed, 26 insertions(+)
create mode 100644 distros/dataverse.no/configs/robots.txt
diff --git a/distros/dataverse.no/configs/robots.txt b/distros/dataverse.no/configs/robots.txt
new file mode 100644
index 0000000..804a067
--- /dev/null
+++ b/distros/dataverse.no/configs/robots.txt
@@ -0,0 +1,26 @@
+User-agent: *
+# Note: In its current form, this sample robots.txt makes the site
+# accessible to all the crawler bots (specified as "User-agent: *")
+# It further instructs the bots to access and index the dataverse and dataset pages;
+# it also tells them to stay away from all other pages (the "Disallow: /" line);
+# and also not to follow any search links on a dataverse page.
+# It is possible to specify different access rules for different bots.
+# For example, if you only want the site to be accessed by Googlebot, but
+# want to keep all the other bots away, uncomment the following two lines:
+#Disallow: /
+#User-agent: Googlebot
+Allow: /$
+Allow: /dataset.xhtml
+Allow: /dataverse/
+Allow: /sitemap/
+# The following lines are for the facebook, twitter and linkedin preview bots:
+Allow: /api/datasets/:persistentId/thumbnail
+Allow: /javax.faces.resource/images/
+# Comment out the following TWO lines if you DON'T MIND the bots crawling the search API links on dataverse pages:
+Disallow: /dataverse/*?q
+Disallow: /dataverse/*/search
+Disallow: /
+# Crawl-delay specification *may* be honored by *some* bots.
+# It is *definitely* ignored by Googlebot (they never promise to
+# recognize it either - it's never mentioned in their documentation)
+Crawl-delay: 20
From 64a5cf2feba49ab5597af02161c2567e05a0b180 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 11 Oct 2023 13:10:12 +0000
Subject: [PATCH 287/354] updated robots.txt config
---
distros/dataverse.no/configs/http-ssl.conf | 6 +++++-
distros/dataverse.no/docker-compose.yaml | 2 ++
2 files changed, 7 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/configs/http-ssl.conf b/distros/dataverse.no/configs/http-ssl.conf
index 670c0aa..fc5b0ac 100755
--- a/distros/dataverse.no/configs/http-ssl.conf
+++ b/distros/dataverse.no/configs/http-ssl.conf
@@ -355,6 +355,10 @@ ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [clien
#RewriteEngine On
#RewriteRule .* https://site.uit.no/dataverseno/nn/driftsmelding/ [R=302,L]
-
+
+ ProxyPass !
+
+Alias /robots.txt /var/www/robots.txt
+
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index ba0a931..a49437c 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -69,6 +69,7 @@ services:
# - ${LOGS_PATH}/shibboleth/shibboleth:/var/log/shibboleth
- ${CONFIGURATION_PATH}/shibboleth:/etc/shibboleth
- ./configs/http-ssl.conf:/etc/httpd/conf.d/ssl.conf
+ - ./configs/robots.txt:/var/www/robots.txt
- ${CONFIGURATION_PATH}/configuration/files/localhost.pem:/etc/pki/tls/certs/localhost.crt
- ${CONFIGURATION_PATH}/configuration/files/localhost.key:/etc/pki/tls/private/localhost.key
# hostname: ${hostname}
@@ -156,6 +157,7 @@ services:
- "mailhost"
- "mailuser"
- "no_reply_email"
+ - "support_email"
- "smtp_password"
- "smtp_port"
- "socket_port"
From 183e9177f967ab02d48568fc72cacc47b258fe6d Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 13 Oct 2023 12:36:09 +0000
Subject: [PATCH 288/354] added sitemap to crontab
---
distros/dataverse.no/runOnce/crontab.sh | 12 +++++++-----
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/distros/dataverse.no/runOnce/crontab.sh b/distros/dataverse.no/runOnce/crontab.sh
index 731f843..9680658 100644
--- a/distros/dataverse.no/runOnce/crontab.sh
+++ b/distros/dataverse.no/runOnce/crontab.sh
@@ -4,8 +4,10 @@ chown omsagent /mntblob/databaseDumps/
usermod -aG docker omsagent
cp -r /distib/private/.ssh /var/opt/microsoft/omsagent/run
chown -R omsagent /var/opt/microsoft/omsagent/run/.ssh
-#0 0 * * * /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/backupData.sh" >> /var/log/bakupslogs.log
-#0 0 * * * /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/backupData.sh" >> /var/log/bakupslogs.log
-#1 0 * * * /distrib/dataverse-docker/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
-#*/3 * * * * /bin/bash /distrib/dataverse-docker/restart-dataverse.sh https://dataverse.no
-
+#0 1 * * * /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/backupData.sh" >> /var/log/bakupslogs.log
+#0 0 * * * /distrib/dataverse-docker/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
+#*/2 * * * * /bin/bash /distrib/dataverse-docker/restart-dataverse.sh https://dataverse.no >> /var/log/restartlogs.log
+#0 16 * * 3 /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/maintenance_notification_on.sh"
+#0 06 * * 4 su root /distrib/dataverse-docker/update.sh
+#30 06 * * 4 /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/maintenance_notification_off.sh"
+#0 21 * * * docker exec dataverse bash -c "curl -X POST http://localhost:8080/api/admin/sitemap"
From 590fb5e9da031cb80e5711836ffe453310124033 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Wed, 29 Nov 2023 17:02:46 +0000
Subject: [PATCH 289/354] fix geobox
---
.../migration/_dvno_geolocation_cleaning20231129.sh | 3 +++
.../migration/_dvno_geolocation_cleaning20231129.sql | 2 ++
2 files changed, 5 insertions(+)
create mode 100644 distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sh
create mode 100644 distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sql
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sh b/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sh
new file mode 100644
index 0000000..cd0f0b3
--- /dev/null
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+export PGPASSWORD=`cat /secrets/db/password`
+psql -U dataverse dataverse -h postgres -f ./_dvno_geolocation_cleaning20231129.sql
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sql
new file mode 100644
index 0000000..f70d735
--- /dev/null
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sql
@@ -0,0 +1,2 @@
+update datasetfieldvalue set value='18.57' where id=204418;
+update datasetfieldvalue set value='69.41' where id=204435;
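
Before and after running the cleanup it is worth confirming what the two rows actually contain; a minimal check in the same style as the script above, assuming the same secrets layout and container network:

    export PGPASSWORD=`cat /secrets/db/password`
    psql -U dataverse dataverse -h postgres -c "select id, value from datasetfieldvalue where id in (204418, 204435);"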
From ef5d566e643b15bf78bf7abfadc07b9a336d2025 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 4 Jan 2024 11:27:35 +0000
Subject: [PATCH 290/354] updated database dump name to avoid overwrite
---
distros/dataverse.no/init.d/cronjob/dumpdatabase.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
index 6040505..925c1eb 100755
--- a/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
+++ b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
@@ -1,6 +1,6 @@
#!/bin/bash
docker exec --user postgres postgres pg_dump -U dataverse dataverse > /mnt/dataverse.dump
gzip -f /mnt/dataverse.dump
-cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/dataverse.`date +%Y%m%d`.dump.gz
+cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/dataverse.`date +%Y%m%d_%H%M%z`.dump.gz
rsync -arvP --rsh=ssh /mnt/dataverse.dump.gz DVmtr-cmp01:/tmp/dataverse.dump.gz
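
The extended suffix adds hour, minute and UTC offset, so several dumps taken on the same day no longer overwrite each other. A quick way to preview the resulting filename:

    echo dataverse.$(date +%Y%m%d_%H%M%z).dump.gz
    # e.g. dataverse.20240104_1127+0000.dump.gz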
From 623759cfc9d58279d5fc98a5e24278cf7fca55be Mon Sep 17 00:00:00 2001
From: Philipp Conzett
Date: Mon, 8 Jan 2024 12:10:20 +0100
Subject: [PATCH 291/354] Update affiliations.csv
---
distros/dataverse.no/init.d/affiliations/affiliations.csv | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/affiliations/affiliations.csv b/distros/dataverse.no/init.d/affiliations/affiliations.csv
index 639f464..c51d91e 100644
--- a/distros/dataverse.no/init.d/affiliations/affiliations.csv
+++ b/distros/dataverse.no/init.d/affiliations/affiliations.csv
@@ -35,7 +35,7 @@ id,dvno_group_name,dvno_group_id,dvno_group_id_explicit,dvno_affiliation,dvno_ab
34,ansgarskolen.no,ansgarskolenno,&explicit/1-ansgarskolenno,Ansgar University College,Ansgar,,,,120177,ansgarhogskole.no,Ansgar University College,Ansgar høyskole,,Ansgar University College,,https://ror.org/05y8hw592,Ansgar Bibelskole,,
35,oslomet.no,oslometno,&explicit/1-oslometno,OsloMet – Oslo Metropolitan University,OsloMet,,,,120186,oslomet.no,OsloMet – Oslo Metropolitan University,OsloMet – storbyuniversitetet,OsloMet – storbyuniversitetet,OsloMet – Oslo Metropolitan University,OsloMet – stuorragávpotuniversitehta,https://ror.org/04q12yn84,OsloMet – Oslo Metropolitan University,HiOA,
36,nmbu.no,nmbuno,&explicit/1-nmbuno,Norwegian University of Life Sciences (NMBU),NMBU,nmbu,2002-red-dvno,cloudian-dvno,1777926,nmbu.no,Norwegian University of Life Sciences,Norges miljø- og biovitenskapelige universitet,Noregs miljø- og biovitenskapelige universitet,Norwegian University of Life Sciences,,https://ror.org/04a1mvv97,Norwegian University of Life Sciences,NMBU,
-37,nibio.no,nibiono,&explicit/1-nibiono,Norwegian Institute of Bioeconomy Research,NIBIO,,,,2052113,nibio.no,Nibio,Nibio,,,,https://ror.org/04aah1z61,Norwegian Institute of Bioeconomy Research,NIBIO,
+37,nibio.no,nibiono,&explicit/1-nibiono,Norwegian Institute of Bioeconomy Research,NIBIO,nibio,2002-red-dvno,cloudian-dvno,2052113,nibio.no,Nibio,Nibio,,,,https://ror.org/04aah1z61,Norwegian Institute of Bioeconomy Research,NIBIO,
38,vid.no,vidno,&explicit/1-vidno,VID Specialized University,VID,vid,2002-red-dvno,cloudian-dvno,2064538,vid.no,VID Specialized University,VID vitenskapelige høgskole,VID vitenskapelige høgskule,VID Specialized University,,https://ror.org/0191b3351,VID Specialized University,VID,
39,nord.no,nordno,&explicit/1-nordno,Nord University,NORD,nord,2002-red-dvno,cloudian-dvno,2066644,nord.no,Nord University,Nord universitet,Nord universitet,Nord University,,https://ror.org/030mwrt98,Nord University,,
40,usn.no,usnno,&explicit/1-usnno,University of South-Eastern Norway,USN,,,,2066647,usn.no,University of South-Eastern Norway,Universitetet i Sørøst-Norge,Universitetet i Søraust-Noreg,University of South-Eastern Norway,,https://ror.org/05ecg5h20,University of South-Eastern Norway,USN,
@@ -80,7 +80,7 @@ id,dvno_group_name,dvno_group_id,dvno_group_id_explicit,dvno_affiliation,dvno_ab
79,nlr.no,nlrno,&explicit/1-nlrno,Norsk Landbruksrådgiving,NLR,,,,,,,,,,,https://ror.org/03c1zct07,Norsk Landbruksrådgiving,NLR,
80,nobel.no,nobelno,&explicit/1-nobelno,Norwegian Nobel Institute,Nobel,,,,,,,,,,,https://ror.org/055wgnw59,Norwegian Nobel Institute,,
81,nofima.no,nofimano,&explicit/1-nofimano,Nofima,Nofima,nofima,2002-red-dvno,cloudian-dvno,,,,,,,,https://ror.org/02v1rsx93,Nofima,,
-82,norceresearch.no,norceresearchno,&explicit/1-norceresearchno,Norwegian Research Centre,NORCE,,,,,,,,,,,https://ror.org/02gagpf75,Norwegian Research Centre,NORCE,
+82,norceresearch.no,norceresearchno,&explicit/1-norceresearchno,NORCE Norwegian Research Centre,NORCE,,,,,,,,,,,https://ror.org/02gagpf75,Norwegian Research Centre,NORCE,
83,norsar.no,norsarno,&explicit/1-norsarno,Norwegian Seismic Array,NORSAR,,,,,,,,,,,https://ror.org/02vw8cm83,Norsar,,
84,norsok.no,norsokno,&explicit/1-norsokno,Norwegian Centre for Organic Agriculture,NORSØK,,,,,,,,,,,,,,
85,norsus.no,norsusno,&explicit/1-norsusno,Norwegian Institute for Sustainability Research,NORSUS,,,,,,,,,,,,,,
From b48415821ae03b782dd036d8f023de1c32cde6d6 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 30 Jan 2024 15:52:58 +0000
Subject: [PATCH 292/354] updated env_sample
---
.env_sample | 36 +++++++++++++++++++++++-------------
1 file changed, 23 insertions(+), 13 deletions(-)
diff --git a/.env_sample b/.env_sample
index 7a55cee..1074965 100644
--- a/.env_sample
+++ b/.env_sample
@@ -2,20 +2,27 @@ LOCAL_WAR=./dataverse.war
COMPOSE_FILE=distros/dataverse.no/docker-compose.yaml
CONFIGURATION_PATH=/distrib/private
DOCROOT=/distrib
-VERSION= 5.11.12.7
-DOCKER_HUB=presacrd4oilmd5ss77y.azurecr.io/dataverseno
+LOGS_PATH=/distrib/private/logs
+DOCKER_HUB=dockerhub/dataverseno
+VERSION=5.13.no
+#DOCKER_HUB=coronawhy
SECRETS_DIR="${CONFIGURATION_PATH}/secrets"
-#WEBANALYTICSON=true
+POSTGRESTMP=/mnt/tmp/postgres
+
+#dataverse
+WEBANALYTICSON=true
+TESTBANNER=true
# Dataverse database settings
DATAVERSE_DB_HOST=postgres
DATAVERSE_DB_USER=dataverse
-DATAVERSE_DB_PASSWORD=psqlpassword
+DATAVERSE_DB_PASSWORD=dvnsecret
DATAVERSE_DB_NAME=dataverse
PASSWORD_FILE=/secrets/asadminpwd
-#SOLR
+
+# Solr
SOLR_SERVICE_HOST=solr:8983
SOLR_SERVICE_PORT=8983
DATAVERSE_URL=localhost:8080
@@ -29,12 +36,10 @@ CONFIG_FILE=counter-processor-config.yaml
# Postgres settings
POSTGRES_USER=dataverse
-POSTGRES_PASSWORD=psqlpassword
+POSTGRES_PASSWORD=dvnsecret
POSTGRES_SERVER=postgres
POSTGRES_DATABASE=dataverse
POSTGRES_DB=dataverse
-POSTGRESTMP=/mnt/tmp/postgres
-
# Domain configuration and init folder
hostname=dataverse.no
@@ -57,9 +62,12 @@ baseurlstring=https\:\/\/mds.test.datacite.org
# AWS settings
# https://guides.dataverse.org/en/latest/installation/config.html#id90
+aws_config=/secrets/aws-cli/.aws/cloudian
aws_bucket_name=awsbucketname
aws_s3_profile=cloudian
-aws_endpoint_url=https\:\/\/s3-oslo.educloud.no
+aws_endpoint=s3-oslo.educloud.no
+aws_endpoint_url=https\:\/\/${aws_endpoint}
+#aws_endpoint_region=oslo
# AWS UiT
aws_uit_bucket_name=awsbucketname2
@@ -67,18 +75,20 @@ aws_uit_s3_profile=uit
#aws_endpoint_url=https\:\/\/s3-oslo.educloud.no
-# Mail relay
+# Mail
# https://guides.dataverse.org/en/latest/developers/troubleshooting.html
-system_email=
-mailhost=smtp-relay.exemple.com
-mailuser=no-reply@dataverse.no
+system_email=""
+mailhost=smtp-relay.exemple.dataverse
+mailuser="DataverseNO "
no_reply_email=no-reply@dataverse.no
smtp_password=smtppassword
smtp_port=465
socket_port=465
+support_email="DataverseNO "
# Federated authentification file
# https://guides.dataverse.org/en/latest/installation/shibboleth.html
federated_json_file=/secrets/openid.json
azure_json_file=/secrets/azopenid.json
+orcid_json_file=/secrets/orcid-member.json
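
.env_sample only documents the expected variables; what the containers actually see is resolved by docker compose. A minimal sketch for checking the resolved values, assuming docker compose v2 and the service name dataverse from the compose file:

    docker compose --env-file .env config | grep -iE 'aws_endpoint|support_email'
    docker exec dataverse printenv | grep -iE 'aws_endpoint|support_email'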
From e773e4579f6e5ad00ea72c8c3837c749bb1dca16 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 1 Mar 2024 08:05:41 +0000
Subject: [PATCH 293/354] fixed geobox for 2FPGZHZJ
---
...tion_cleaning20231129.sh => _dvno_geolocation_cleaning.sh} | 2 +-
.../migration/_dvno_geolocation_cleaning20240301.sql | 4 ++++
2 files changed, 5 insertions(+), 1 deletion(-)
rename distros/dataverse.no/migration/{_dvno_geolocation_cleaning20231129.sh => _dvno_geolocation_cleaning.sh} (84%)
create mode 100644 distros/dataverse.no/migration/_dvno_geolocation_cleaning20240301.sql
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sh b/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
similarity index 84%
rename from distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sh
rename to distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
index cd0f0b3..d79132e 100644
--- a/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sh
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
@@ -1,3 +1,3 @@
#!/bin/bash
export PGPASSWORD=`cat /secrets/db/password`
-psql -U dataverse dataverse -h postgres -f ./_dvno_geolocation_cleaning20231129.sql
+psql -U dataverse dataverse -h postgres -f ./_dvno_geolocation_cleaning20240301.sql
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning20240301.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning20240301.sql
new file mode 100644
index 0000000..c0bc9bd
--- /dev/null
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning20240301.sql
@@ -0,0 +1,4 @@
+update datasetfieldvalue set value='11.11621830' where id=206935;
+update datasetfieldvalue set value='11.51643553' where id=206932;
+update datasetfieldvalue set value='61.57060935' where id=206933;
+update datasetfieldvalue set value='61.15827431' where id=206938;
From 7b13634242a244a299887a95b1f1c8591a64c00d Mon Sep 17 00:00:00 2001
From: root
Date: Tue, 5 Mar 2024 15:04:42 +0000
Subject: [PATCH 294/354] changed previewer config
---
distros/dataverse.no/init.d/07-previewers.sh | 120 --
distros/dataverse.no/runOnce/previewers.sh | 1464 ++++++++++++++++++
2 files changed, 1464 insertions(+), 120 deletions(-)
delete mode 100755 distros/dataverse.no/init.d/07-previewers.sh
create mode 100644 distros/dataverse.no/runOnce/previewers.sh
diff --git a/distros/dataverse.no/init.d/07-previewers.sh b/distros/dataverse.no/init.d/07-previewers.sh
deleted file mode 100755
index 7df0fa0..0000000
--- a/distros/dataverse.no/init.d/07-previewers.sh
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/bin/bash
-
-# https://guides.dataverse.org/en/5.12.1/api/external-tools.html#external-tools-for-files
-export PGPASSWORD=`cat /secrets/db/password`
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/read_text_plain.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Text\", \"description\":\"Read the text file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/plain\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_html_type.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Html\", \"description\":\"View the html file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/HtmlPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/html\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_mp3.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mp3\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_mpeg.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/mpeg\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_wav.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/wav\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_audio_ogg.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Audio\", \"description\":\"Listen to an audio file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/AudioPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"audio/ogg\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_gif.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/gif\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_jpeg.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/jpeg\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_image_png.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Image\", \"description\":\"Preview an image file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/ImagePreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"image/png\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_pdf.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Read Document\", \"description\":\"Read a pdf document.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/PDFPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/pdf\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_mp4.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/mp4\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_ogg.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/ogg\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/play_video_quicktime.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"Play Video\", \"description\":\"Watch a video file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/VideoPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"video/quicktime\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_csv.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/csv\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_csv_tabular.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/comma-separated-values\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_tsv.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Data\", \"description\":\"View the spreadsheet data.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/SpreadsheetPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"text/tab-separated-values\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_stata.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Stata File\", \"description\":\"View the Stata file as text.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-stata-syntax\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_r_file.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View R file\", \"description\":\"View the R file as text.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/TextPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"type/x-r-syntax\" }"
-fi
-
-psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/view_hypothesis.sql -o /tmp/toolexist.status
-EXIST=`grep '0 rows' /tmp/toolexist.status`
-if [[ -n $EXIST ]]; then
- curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ "{ \"displayName\":\"View Annotations\", \"description\":\"View the annotation entries in a file.\", \"scope\":\"file\", \"type\":\"preview\", \"hasPreviewMode\":\"true\", \"toolUrl\":\"https://gdcc.github.io/dataverse-previewers/previewers/v1.3/HypothesisPreview.html\", \"toolParameters\": { \"queryParameters\":[ {\"fileid\":\"{fileId}\"}, {\"siteUrl\":\"{siteUrl}\"}, {\"key\":\"{apiToken}\"}, {\"datasetid\":\"{datasetId}\"}, {\"datasetversion\":\"{datasetVersion}\"}, {\"locale\":\"{localeCode}\"} ] }, \"contentType\":\"application/x-json-hypothesis\" }"
-fi
-
-
diff --git a/distros/dataverse.no/runOnce/previewers.sh b/distros/dataverse.no/runOnce/previewers.sh
new file mode 100644
index 0000000..abca01b
--- /dev/null
+++ b/distros/dataverse.no/runOnce/previewers.sh
@@ -0,0 +1,1464 @@
+#!/bin/bash
+
+
+#https://raw.githubusercontent.com/DataverseNO/dataverse-previewers/develop/6.1curlcommands.md
+# Example Curl Commands to register previewers for Dataverse, version 5.13+
+
+for id in $(curl -s http://localhost:8080/api/admin/externalTools | jq -r .data[].id); do curl -X DELETE "http://localhost:8080/api/admin/externalTools/$id";done
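
The loop above wipes every registered external tool before the v1.4 set is re-registered below; listing the tools before and after the purge makes it easy to confirm the result, assuming jq is available as the loop already requires:

    curl -s http://localhost:8080/api/admin/externalTools | jq -r '.data[] | "\(.id)  \(.displayName)"'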
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Read Text",
+ "description":"Read the text file.",
+ "toolName":"textPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/TextPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"text/plain",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Html",
+ "description":"View the html file.",
+ "toolName":"htmlPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/HtmlPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"text/html",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Play Audio",
+ "description":"Listen to an audio file.",
+ "toolName":"audioPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/AudioPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"audio/mp3",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Play Audio",
+ "description":"Listen to an audio file.",
+ "toolName":"audioPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/AudioPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"audio/mpeg",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Play Audio",
+ "description":"Listen to an audio file.",
+ "toolName":"audioPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/AudioPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"audio/wav",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Play Audio",
+ "description":"Listen to an audio file.",
+ "toolName":"audioPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/AudioPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"audio/ogg",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Play Audio",
+ "description":"Listen to an audio file.",
+ "toolName":"audioPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/AudioPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"audio/x-m4a",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Image",
+ "description":"Preview an image file.",
+ "toolName":"imagePreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ImagePreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"image/gif",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Image",
+ "description":"Preview an image file.",
+ "toolName":"imagePreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ImagePreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"image/jpeg",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Image",
+ "description":"Preview an image file.",
+ "toolName":"imagePreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ImagePreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"image/png",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Read Document",
+ "description":"Read a pdf document.",
+ "toolName":"pdfPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/PDFPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/pdf",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Play Video",
+ "description":"Watch a video file.",
+ "toolName":"videoPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/VideoPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"video/mp4",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Play Video",
+ "description":"Watch a video file.",
+ "toolName":"videoPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/VideoPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"video/ogg",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Play Video",
+ "description":"Watch a video file.",
+ "toolName":"videoPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/VideoPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"video/quicktime",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Data",
+ "description":"View the spreadsheet data.",
+ "toolName":"spreadsheetPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/SpreadsheetPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"text/comma-separated-values",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Data",
+ "description":"View the spreadsheet data.",
+ "toolName":"spreadsheetPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/SpreadsheetPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"text/tab-separated-values",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Data",
+ "description":"View the spreadsheet data.",
+ "toolName":"spreadsheetPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/SpreadsheetPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"text/csv",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Data",
+ "description":"View the spreadsheet data.",
+ "toolName":"spreadsheetPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/SpreadsheetPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"text/tsv",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Stata File",
+ "description":"View the Stata file as text.",
+ "toolName":"stataPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/TextPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/x-stata-syntax",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View R file",
+ "description":"View the R file as text.",
+ "toolName":"rPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/TextPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"type/x-r-syntax",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Annotations",
+ "description":"View the annotation entries in a file.",
+ "toolName":"annotationPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/HypothesisPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/x-json-hypothesis",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Map",
+ "description":"View a map of the file.",
+ "toolName":"mapPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/MapPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/geo+json",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+### MapViewer:
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Map",
+ "description":"View a map of the file.",
+ "toolName":"mapPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/MapPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/geo+json",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+### ZIP Previewer:
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Preview Zip file",
+ "description":"Preview the structure of a Zip file.",
+ "toolName":"zipPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ZipPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/zip",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Preview ELN file",
+ "description":"Preview the structure of an ELN Archive.",
+ "toolName":"zipPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ZipPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/vnd.eln+zip",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+### NcML Previewer:
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Show NcML (XML)",
+ "description":"Metadata from HDF5 files.",
+ "toolName":"ncmlPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/NcmlPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "requirements": {
+ "auxFilesExist": [
+ {
+ "formatTag": "NcML",
+ "formatVersion": "0.1"
+ }
+ ]
+ },
+ "contentType":"application/x-hdf5",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Show NcML (XML)",
+ "description":"Metadata from NetCDF files.",
+ "toolName":"ncmlPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/NcmlPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "requirements": {
+ "auxFilesExist": [
+ {
+ "formatTag": "NcML",
+ "formatVersion": "0.1"
+ }
+ ]
+ },
+ "contentType":"application/netcdf",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
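Both NcML registrations are gated by the auxFilesExist requirement above, so the previewer is only offered once an NcML (0.1) auxiliary file has been extracted for the datafile. A minimal check, assuming the standard auxiliary-file access endpoint and a hypothetical datafile id 42:

# Hypothetical check: is the NcML 0.1 auxiliary file present for datafile 42?
curl -s -o /dev/null -w "%{http_code}\n" \
  "http://localhost:8080/api/access/datafile/42/auxiliary/NcML/0.1"
# 200 = auxiliary file exists (previewer will be offered); 404 = not extracted yet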
+
+### H5Web Previewer for HDF5 and NetCDF files:
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"H5Web",
+ "description":"Explore and visualize HDF5 files",
+ "toolName":"HDF5Preview",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/HDF5Preview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/x-hdf5",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"H5Web",
+ "description":"Explore and visualize HDF5 files",
+ "toolName":"HDF5Preview",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/HDF5Preview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/netcdf",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+### Markdown Previewer
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Show Markdown (MD)",
+ "description":"View the Markdown file.",
+ "toolName":"mdPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/MdPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"text/markdown",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+### ESRI Shape Previewer (beta)
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Map",
+ "description":"View a map of the file.",
+ "toolName":"mapShpPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/MapShpPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/zipped-shapefile",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+### GeoTIFF Previewer (beta)
+
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"View Map",
+ "description":"View a map of the file.",
+ "toolName":"mapShpPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/MapRasterPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"image/tiff",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+### Rich HTML Previewer - Potential Issues if used with malicious content.
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Rich HTML Previewer",
+ "description":"View the html file and run potentially malicious JavaScript. Useful for interactive HTML files that use e.g. Plotly",
+ "toolName":"richHtmlPreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/RichHtmlPreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"text/html",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
+
+
+### RO-Crate Previewer (beta)
+
+curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \
+'{
+ "displayName":"Show RO-Crate",
+ "description":"View the RO-Crate metadata file.",
+ "toolName":"rocratePreviewer",
+ "scope":"file",
+ "types":["preview"],
+ "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ROCratePreview.html",
+ "toolParameters": {
+ "queryParameters":[
+ {"fileid":"{fileId}"},
+ {"siteUrl":"{siteUrl}"},
+ {"datasetid":"{datasetId}"},
+ {"datasetversion":"{datasetVersion}"},
+ {"locale":"{localeCode}"}
+ ]
+ },
+ "contentType":"application/ld+json; profile=\"http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate\"",
+ "allowedApiCalls": [
+ {
+ "name": "retrieveFileContents",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true",
+ "timeOut": 3600
+ },
+ {
+ "name": "downloadFile",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false",
+ "timeOut": 3600
+ },
+ {
+ "name": "getDatasetVersionMetadata",
+ "httpMethod": "GET",
+ "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}",
+ "timeOut": 3600
+ }
+ ]
+}'
+
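After the script has run, the registrations can be verified, and duplicate or obsolete entries removed, through the same admin endpoint. A minimal sketch, assuming jq is available and using 42 as a placeholder tool id:

# List all registered external tools with their ids and content types
curl -s http://localhost:8080/api/admin/externalTools | jq '.data[] | {id, displayName, contentType}'

# Delete a registration by id (42 is a placeholder)
curl -X DELETE http://localhost:8080/api/admin/externalTools/42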
+
From 4478d85f4638bbd52fcec6ff4eba3ad64bb5c387 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Wed, 6 Mar 2024 09:51:46 +0100
Subject: [PATCH 295/354] ExternalTool data types and in bundle file
---
.../modification/Bundle.properties | 39 +++++++++++++++-
.../modification/Bundle.properties.patch | 44 +++++++++++++++++++
2 files changed, 82 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/modification/Bundle.properties b/distros/dataverse.no/modification/Bundle.properties
index 93ddde7..af02853 100644
--- a/distros/dataverse.no/modification/Bundle.properties
+++ b/distros/dataverse.no/modification/Bundle.properties
@@ -304,7 +304,7 @@ login.forgot.text=Forgot your password?
login.builtin=Dataverse Account
login.institution=Institutional Account
login.institution.blurb=Log in or sign up with your institutional account — more information about account creation.
-login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance.
+login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance.
login.builtin.credential.usernameOrEmail=Username/Email
login.builtin.credential.password=Password
login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account?
@@ -2640,6 +2640,43 @@ externaltools.dct.displayname=Data Curation Tool
externaltools.dct.description=Data Curation Tool for curation of variables
externaltools.explorer.displayname=Data Explorer
externaltools.explorer.description=The Data Explorer provides a GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis.
+externaltools.textPreviewer.displayname=Read Text
+externaltools.textPreviewer.description=Read the text file.
+externaltools.htmlPreviewer.displayname=View Html
+externaltools.htmlPreviewer.description=View the html file.
+externaltools.audioPreviewer.displayname=Play Audio
+externaltools.audioPreviewer.description=Listen to an audio file.
+externaltools.imagePreviewer.displayname=View Image
+externaltools.imagePreviewer.description=Preview an image file.
+externaltools.pdfPreviewer.displayname=Read Document
+externaltools.pdfPreviewer.description=Read a pdf document.
+externaltools.videoPreviewer.displayname=Play Video
+externaltools.videoPreviewer.description=Watch a video file.
+externaltools.spreadsheetPreviewer.displayname=View Data
+externaltools.spreadsheetPreviewer.description=View the spreadsheet data.
+externaltools.stataPreviewer.displayname=View Stata File
+externaltools.stataPreviewer.description=View the Stata file as text.
+externaltools.rPreviewer.displayname=View R file
+externaltools.rPreviewer.description=View the R file as text.
+externaltools.annotationPreviewer.displayname=View Annotations
+externaltools.annotationPreviewer.description=View the annotation entries in a file.
+externaltools.mapPreviewer.displayname=View Map
+externaltools.mapPreviewer.description=View a map of the file.
+externaltools.zipPreviewer.displayname=Preview Zip file
+externaltools.zipPreviewer.description=Preview the structure of a Zip file.
+externaltools.ncmlPreviewer.displayname=Show NcML (XML)
+externaltools.ncmlPreviewer.description=Metadata from NetCDF files.
+externaltools.HDF5Preview.displayname=H5Web
+externaltools.HDF5Preview.description=Metadata from HDF5 files.
+externaltools.mdPreviewer.displayname=Show Markdown (MD)
+externaltools.mdPreviewer.description=View the Markdown file.
+externaltools.mapShpPreviewer.displayname=View Map
+externaltools.mapShpPreviewer.description=View a map of the file.
+externaltools.richHtmlPreviewer.displayname=Rich HTML Previewer
+externaltools.richHtmlPreviewer.description=View the html file and run potentially malicious JavaScript. Useful for interactive HTML files that use e.g. Plotly
+externaltools.rocratePreviewer.displayname=Show RO-Crate
+externaltools.rocratePreviewer.description=View the RO-Crate metadata file.
+
# api/admin/datasetfield/load
api.admin.datasetfield.load.ArrayIndexOutOfBoundMessage=Error parsing metadata block in {0} part, line #{1}: missing ''{2}'' column (#{3})
diff --git a/distros/dataverse.no/modification/Bundle.properties.patch b/distros/dataverse.no/modification/Bundle.properties.patch
index 6dadd3e..51b3a0c 100644
--- a/distros/dataverse.no/modification/Bundle.properties.patch
+++ b/distros/dataverse.no/modification/Bundle.properties.patch
@@ -11,3 +11,47 @@
login.builtin.credential.usernameOrEmail=Username/Email
login.builtin.credential.password=Password
login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account?
+@@ -2640,6 +2640,43 @@
+ externaltools.dct.description=Data Curation Tool for curation of variables
+ externaltools.explorer.displayname=Data Explorer
+ externaltools.explorer.description=The Data Explorer provides a GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis.
++externaltools.textPreviewer.displayname=Read Text
++externaltools.textPreviewer.description=Read the text file.
++externaltools.htmlPreviewer.displayname=View Html
++externaltools.htmlPreviewer.description=View the html file.
++externaltools.audioPreviewer.displayname=Play Audio
++externaltools.audioPreviewer.description=Listen to an audio file.
++externaltools.imagePreviewer.displayname=View Image
++externaltools.imagePreviewer.description=Preview an image file.
++externaltools.pdfPreviewer.displayname=Read Document
++externaltools.pdfPreviewer.description=Read a pdf document.
++externaltools.videoPreviewer.displayname=Play Video
++externaltools.videoPreviewer.description=Watch a video file.
++externaltools.spreadsheetPreviewer.displayname=View Data
++externaltools.spreadsheetPreviewer.description=View the spreadsheet data.
++externaltools.stataPreviewer.displayname=View Stata File
++externaltools.stataPreviewer.description=View the Stata file as text.
++externaltools.rPreviewer.displayname=View R file
++externaltools.rPreviewer.description=View the R file as text.
++externaltools.annotationPreviewer.displayname=View Annotations
++externaltools.annotationPreviewer.description=View the annotation entries in a file.
++externaltools.mapPreviewer.displayname=View Map
++externaltools.mapPreviewer.description=View a map of the file.
++externaltools.zipPreviewer.displayname=Preview Zip file
++externaltools.zipPreviewer.description=Preview the structure of a Zip file.
++externaltools.ncmlPreviewer.displayname=Show NcML (XML)
++externaltools.ncmlPreviewer.description=Metadata from NetCDF files.
++externaltools.HDF5Preview.displayname=H5Web
++externaltools.HDF5Preview.description=Metadata from HDF5 files.
++externaltools.mdPreviewer.displayname=Show Markdown (MD)
++externaltools.mdPreviewer.description=View the Markdown file.
++externaltools.mapShpPreviewer.displayname=View Map
++externaltools.mapShpPreviewer.description=View a map of the file.
++externaltools.richHtmlPreviewer.displayname=Rich HTML Previewer
++externaltools.richHtmlPreviewer.description=View the html file and run potentially malicious JavaScript. Useful for interactive HTML files that use e.g. Plotly
++externaltools.rocratePreviewer.displayname=Show RO-Crate
++externaltools.rocratePreviewer.description=View the RO-Crate metadata file.
++
+
+ # api/admin/datasetfield/load
+ api.admin.datasetfield.load.ArrayIndexOutOfBoundMessage=Error parsing metadata block in {0} part, line #{1}: missing ''{2}'' column (#{3})
\ No newline at end of file
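Bundle.properties.patch is expected to mirror the edits made to Bundle.properties above. A minimal sketch of regenerating it against a pristine upstream copy (the upstream path is illustrative):

# Regenerate the patch from an unmodified upstream Bundle.properties (path is an assumption)
diff -u /tmp/upstream/Bundle.properties \
        distros/dataverse.no/modification/Bundle.properties \
        > distros/dataverse.no/modification/Bundle.properties.patch
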
From 9eb8a0b5b3891783271fe55e2ea6cc7177be6f85 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Wed, 6 Mar 2024 09:52:05 +0100
Subject: [PATCH 296/354] Add Gitignore file
---
.gitignore | 447 +++++++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 447 insertions(+)
diff --git a/.gitignore b/.gitignore
index ade617f..e64d14f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,450 @@ dataverse.war
#Ignoring letsencrypt folders for SSL
letsencrypt
letsencrypt/*
+
+# Created by https://www.toptal.com/developers/gitignore/api/visualstudio,visualstudiocode,java
+# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudio,visualstudiocode,java
+
+### VisualStudioCode ###
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+!.vscode/*.code-snippets
+
+# Local History for Visual Studio Code
+.history/
+
+# Built Visual Studio Code Extensions
+*.vsix
+
+### VisualStudioCode Patch ###
+# Ignore all local history of files
+.history
+.ionide
+
+### VisualStudio ###
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
+
+# User-specific files
+*.rsuser
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Mono auto generated files
+mono_crash.*
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+[Ww][Ii][Nn]32/
+[Aa][Rr][Mm]/
+[Aa][Rr][Mm]64/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+[Ll]ogs/
+
+# Visual Studio 2015/2017 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# Visual Studio 2017 auto generated files
+Generated\ Files/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUnit
+*.VisualState.xml
+TestResult.xml
+nunit-*.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# Benchmark Results
+BenchmarkDotNet.Artifacts/
+
+# .NET Core
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+# ASP.NET Scaffolding
+ScaffoldingReadMe.txt
+
+# StyleCop
+StyleCopReport.xml
+
+# Files built by Visual Studio
+*_i.c
+*_p.c
+*_h.h
+*.ilk
+*.meta
+*.obj
+*.iobj
+*.pch
+*.pdb
+*.ipdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*_wpftmp.csproj
+*.log
+*.tlog
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# Visual Studio Trace Files
+*.e2e
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# AxoCover is a Code Coverage Tool
+.axoCover/*
+!.axoCover/settings.json
+
+# Coverlet is a free, cross platform Code Coverage Tool
+coverage*.json
+coverage*.xml
+coverage*.info
+
+# Visual Studio code coverage results
+*.coverage
+*.coveragexml
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# Note: Comment the next line if you want to checkin your web deploy settings,
+# but database connection strings (with potential passwords) will be unencrypted
+*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# NuGet Symbol Packages
+*.snupkg
+# The packages folder can be ignored because of Package Restore
+**/[Pp]ackages/*
+# except build/, which is used as an MSBuild target.
+!**/[Pp]ackages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/[Pp]ackages/repositories.config
+# NuGet v3's project.json files produces more ignorable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+*.appx
+*.appxbundle
+*.appxupload
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!?*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+orleans.codegen.cs
+
+# Including strong name files can present a security risk
+# (https://github.com/github/gitignore/pull/2483#issue-259490424)
+#*.snk
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+ServiceFabricBackup/
+*.rptproj.bak
+
+# SQL Server files
+*.mdf
+*.ldf
+*.ndf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+*.rptproj.rsuser
+*- [Bb]ackup.rdl
+*- [Bb]ackup ([0-9]).rdl
+*- [Bb]ackup ([0-9][0-9]).rdl
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+node_modules/
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
+*.vbw
+
+# Visual Studio 6 auto-generated project file (contains which files were open etc.)
+*.vbp
+
+# Visual Studio 6 workspace and project file (working project files containing files to include in project)
+*.dsw
+*.dsp
+
+# Visual Studio 6 technical files
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# CodeRush personal settings
+.cr/personal
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+# Cake - Uncomment if you are using it
+# tools/**
+# !tools/packages.config
+
+# Tabs Studio
+*.tss
+
+# Telerik's JustMock configuration file
+*.jmconfig
+
+# BizTalk build output
+*.btp.cs
+*.btm.cs
+*.odx.cs
+*.xsd.cs
+
+# OpenCover UI analysis results
+OpenCover/
+
+# Azure Stream Analytics local run output
+ASALocalRun/
+
+# MSBuild Binary and Structured Log
+*.binlog
+
+# NVidia Nsight GPU debugger configuration file
+*.nvuser
+
+# MFractors (Xamarin productivity tool) working folder
+.mfractor/
+
+# Local History for Visual Studio
+.localhistory/
+
+# Visual Studio History (VSHistory) files
+.vshistory/
+
+# BeatPulse healthcheck temp database
+healthchecksdb
+
+# Backup folder for Package Reference Convert tool in Visual Studio 2017
+MigrationBackup/
+
+# Ionide (cross platform F# VS Code tools) working folder
+.ionide/
+
+# Fody - auto-generated XML schema
+FodyWeavers.xsd
+
+# VS Code files for those working on multiple tools
+*.code-workspace
+
+# Local History for Visual Studio Code
+
+# Windows Installer files from build outputs
+*.cab
+*.msi
+*.msix
+*.msm
+*.msp
+
+# JetBrains Rider
+*.sln.iml
+
+### VisualStudio Patch ###
+# Additional files built by Visual Studio
+
+
+### Java ###
+# Compiled class file
+*.class
+
+# Log file
+*.log
+
+# BlueJ files
+*.ctxt
+
+# Mobile Tools for Java (J2ME)
+.mtj.tmp/
+
+# Package Files #
+*.jar
+*.war
+*.nar
+*.ear
+*.zip
+*.tar.gz
+*.rar
+
+# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
+hs_err_pid*
+replay_pid*
+
+# End of https://www.toptal.com/developers/gitignore/api/visualstudio,visualstudiocode,java
+
From 1eb77ce98c0f9d00f1b04a8e3406acbdfa2137e4 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Wed, 6 Mar 2024 14:47:46 +0100
Subject: [PATCH 297/354] change 0000-preboot.sh
---
distros/dataverse.no/init.d/0000-preboot.sh | 33 ++++++++++++++++++++-
1 file changed, 32 insertions(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/0000-preboot.sh b/distros/dataverse.no/init.d/0000-preboot.sh
index 997bcfc..10d4069 100755
--- a/distros/dataverse.no/init.d/0000-preboot.sh
+++ b/distros/dataverse.no/init.d/0000-preboot.sh
@@ -1,5 +1,6 @@
#!/bin/bash
echo > ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.storage-driver-id=S3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.files.S3.type=s3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.files.S3.label=S3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.files.S3.bucket-name=${aws_bucket_name}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
@@ -7,7 +8,6 @@ echo "create-system-properties dataverse.files.S3.download-redirect=true" >> ${I
echo "create-system-properties dataverse.files.S3.upload-redirect=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.files.S3.url-expiration-minutes=120" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.files.S3.connection-pool-size=4096" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
-echo "create-system-properties dataverse.files.storage-driver-id=S3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.files.S3.profile=${aws_s3_profile}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logStandardStreams=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logtoFile=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
@@ -17,3 +17,34 @@ endpoint=$aws_endpoint_url
echo "create-system-properties dataverse.files.S3.access-key="$keyid >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.files.S3.secret-key="$secret_key >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.files.S3.custom-endpoint-url=$endpoint" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#V5.13
+echo "create-system-properties dataverse.files.uploads=/opt/payara/appserver/glassfish/domains/domain1/uploads" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.siteUrl="'https\:\/\/'${hostname} >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.directory=/tmp/dataverse">> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.rserve.port=6311" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.rserve.user=rserve" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.rserve.password=rserve" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.rserve.tempdir=/tmp/Rserv" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.datacite.mds-api-url=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.datacite.rest-api-url=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.datacite.username=${doi_username}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.datacite.password=${doi_password}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.handlenet.key.path=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.handlenet.key.passphrase=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.pid.handlenet.index=300" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.permalink.base-url="'https\:\/\/'${hostname} >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.ezid.api-url="'https\:\/\/'${hostname} >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.ezid.username=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.ezid.password=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.signposting.level1-author-limit=12" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.signposting.level1-item-limit=12" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.api.allow-incomplete-metadata="'https\:\/\/'${hostname} >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.ui.show-validity-filter=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.ui.allow-review-for-incomplete=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.spi.export.directory=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.mail.support-email=support@dataverse.no" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.mail.cc-support-on-contact-emails=support@dataverse.no" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.netcdf.geo-extract-s3-direct-upload=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.ezid.password=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.ezid.password=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.ezid.password=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
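preboot.payara is a plain list of asadmin commands that Payara executes before the application is deployed, so each echo above appends one command to that file. A minimal sanity check once the container is up (assumes asadmin is on PATH inside the Dataverse container):

# Inspect the generated command file
cat ${INIT_SCRIPTS_FOLDER}/preboot.payara

# Confirm the properties were actually created (run inside the container)
asadmin list-system-properties | grep '^dataverse\.'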
From 93c1739e637058204813ca86c2b8560326287cc3 Mon Sep 17 00:00:00 2001
From: root
Date: Wed, 13 Mar 2024 10:25:48 +0000
Subject: [PATCH 298/354] Configuration for version 5.14
---
distros/dataverse.no/configs/citation.tsv | 326 ++++++++++++++++++
distros/dataverse.no/configs/http-ssl.conf | 50 ++-
distros/dataverse.no/configs/schema.xml | 6 +-
distros/dataverse.no/docker-compose.yaml | 156 +++++----
.../dataverse.no/init.d/010-mailrelay-set.sh | 5 +
distros/dataverse.no/init.d/201-bundle.sh | 2 +-
distros/dataverse.no/init.d/preboot.payara | 23 ++
restart-dataverse.sh | 0
8 files changed, 480 insertions(+), 88 deletions(-)
create mode 100644 distros/dataverse.no/configs/citation.tsv
create mode 100644 distros/dataverse.no/init.d/preboot.payara
mode change 100644 => 100755 restart-dataverse.sh
diff --git a/distros/dataverse.no/configs/citation.tsv b/distros/dataverse.no/configs/citation.tsv
new file mode 100644
index 0000000..18bc31c
--- /dev/null
+++ b/distros/dataverse.no/configs/citation.tsv
@@ -0,0 +1,326 @@
+#metadataBlock name dataverseAlias displayName blockURI
+ citation Citation Metadata https://dataverse.org/schema/citation/
+#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI
+ title Title The main title of the Dataset text 0 TRUE FALSE FALSE FALSE TRUE TRUE citation http://purl.org/dc/terms/title
+ subtitle Subtitle A secondary title that amplifies or states certain limitations on the main title text 1 FALSE FALSE FALSE FALSE FALSE FALSE citation
+ alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE FALSE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative
+ alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution
+ otherId Other Identifier Another unique identifier for the Dataset (e.g. producer's or another repository's identifier) none 4 : FALSE FALSE TRUE FALSE FALSE FALSE citation
+ otherIdAgency Agency The name of the agency that generated the other identifier text 5 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation
+ otherIdValue Identifier Another identifier uniquely identifies the Dataset text 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation
+ author Author The entity, e.g. a person or organization, that created the Dataset none 7 FALSE FALSE TRUE FALSE TRUE TRUE citation http://purl.org/dc/terms/creator
+ authorName Name The name of the author, such as the person's name or the name of an organization 1) Family Name, Given Name or 2) Organization XYZ text 8 #VALUE TRUE FALSE FALSE TRUE TRUE TRUE author citation
+ authorAffiliation Affiliation The name of the entity affiliated with the author, e.g. an organization's name Organization XYZ text 9 (#VALUE) TRUE FALSE FALSE TRUE TRUE FALSE author citation
+ authorIdentifierScheme Identifier Type The type of identifier that uniquely identifies the author (e.g. ORCID, ISNI) text 10 - #VALUE: FALSE TRUE FALSE FALSE TRUE FALSE author citation http://purl.org/spar/datacite/AgentIdentifierScheme
+ authorIdentifier Identifier Uniquely identifies the author when paired with an identifier type text 11 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE author citation http://purl.org/spar/datacite/AgentIdentifier
+ datasetContact Point of Contact The entity, e.g. a person or organization, that users of the Dataset can contact with questions none 12 FALSE FALSE TRUE FALSE TRUE TRUE citation
+ datasetContactName Name The name of the point of contact, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 13 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE datasetContact citation
+ datasetContactAffiliation Affiliation The name of the entity affiliated with the point of contact, e.g. an organization's name Organization XYZ text 14 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE datasetContact citation
+ datasetContactEmail E-mail The point of contact's email address name@email.xyz email 15 #EMAIL FALSE FALSE FALSE FALSE TRUE TRUE datasetContact citation
+ dsDescription Description A summary describing the purpose, nature, and scope of the Dataset none 16 FALSE FALSE TRUE FALSE TRUE TRUE citation
+ dsDescriptionValue Text A summary describing the purpose, nature, and scope of the Dataset textbox 17 #VALUE TRUE FALSE FALSE FALSE TRUE TRUE dsDescription citation
+ dsDescriptionDate Date The date when the description was added to the Dataset. If the Dataset contains more than one description, e.g. the data producer supplied one description and the data repository supplied another, this date is used to distinguish between the descriptions YYYY-MM-DD date 18 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE dsDescription citation
+ subject Subject The area of study relevant to the Dataset text 19 TRUE TRUE TRUE TRUE TRUE TRUE citation http://purl.org/dc/terms/subject
+ keyword Keyword A key term that describes an important aspect of the Dataset and information about any controlled vocabulary used none 20 FALSE FALSE TRUE FALSE TRUE FALSE citation
+ keywordValue Term A key term that describes important aspects of the Dataset text 21 #VALUE TRUE FALSE FALSE TRUE TRUE FALSE keyword citation
+ keywordVocabulary Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 22 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE keyword citation
+ keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation
+ topicClassification Topic Classification Indicates a broad, important topic or subject that the Dataset covers and information about any controlled vocabulary used none 24 FALSE FALSE TRUE FALSE FALSE FALSE citation
+ topicClassValue Term A topic or subject term text 25 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE topicClassification citation
+ topicClassVocab Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 26 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation
+ topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation
+ publication Related Publication The article or report that uses the data in the Dataset. The full list of related publications will be displayed on the metadata tab none 28 FALSE FALSE TRUE FALSE TRUE FALSE citation http://purl.org/dc/terms/isReferencedBy
+ publicationCitation Citation The full bibliographic citation for the related publication textbox 29 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/dc/terms/bibliographicCitation
+ publicationIDType Identifier Type The type of identifier that uniquely identifies a related publication text 30 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifierScheme
+ publicationIDNumber Identifier The identifier for a related publication text 31 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifier
+ publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 32 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution
+ notesText Notes Additional information about the Dataset textbox 33 FALSE FALSE FALSE FALSE TRUE FALSE citation
+ language Language A language that the Dataset's files is written in text 34 TRUE TRUE TRUE TRUE FALSE FALSE citation http://purl.org/dc/terms/language
+ producer Producer The entity, such a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset none 35 FALSE FALSE TRUE FALSE FALSE FALSE citation
+ producerName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 36 #VALUE TRUE FALSE FALSE TRUE FALSE TRUE producer citation
+ producerAffiliation Affiliation The name of the entity affiliated with the producer, e.g. an organization's name Organization XYZ text 37 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation
+ producerAbbreviation Abbreviated Name The producer's abbreviated name (e.g. IQSS, ICPSR) text 38 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation
+ producerURL URL The URL of the producer's website https:// url 39 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE producer citation
+ producerLogoURL Logo URL The URL of the producer's logo https:// url 40 FALSE FALSE FALSE FALSE FALSE FALSE producer citation
+ productionDate Production Date The date when the data were produced (not distributed, published, or archived) YYYY-MM-DD date 41 TRUE FALSE FALSE TRUE FALSE FALSE citation
+ productionPlace Production Location The location where the data and any related materials were produced or collected text 42 TRUE FALSE TRUE TRUE FALSE FALSE citation
+ contributor Contributor The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset none 43 : FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/contributor
+ contributorType Type Indicates the type of contribution made to the dataset text 44 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE contributor citation
+ contributorName Name The name of the contributor, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 45 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE contributor citation
+ grantNumber Funding Information Information about the Dataset's financial support none 46 : FALSE FALSE TRUE FALSE FALSE FALSE citation https://schema.org/sponsor
+ grantNumberAgency Agency The agency that provided financial support for the Dataset Organization XYZ text 47 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation
+ grantNumberValue Identifier The grant identifier or contract identifier of the agency that provided financial support for the Dataset text 48 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation
+ distributor Distributor The entity, such as a person or organization, designated to generate copies of the Dataset, including any editions or revisions none 49 FALSE FALSE TRUE FALSE FALSE FALSE citation
+ distributorName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 50 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE distributor citation
+ distributorAffiliation Affiliation The name of the entity affiliated with the distributor, e.g. an organization's name Organization XYZ text 51 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation
+ distributorAbbreviation Abbreviated Name The distributor's abbreviated name (e.g. IQSS, ICPSR) text 52 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation
+ distributorURL URL The URL of the distributor's webpage https:// url 53 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE distributor citation
+ distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54 FALSE FALSE FALSE FALSE FALSE FALSE distributor citation
+ distributionDate Distribution Date The date when the Dataset was made available for distribution/presentation YYYY-MM-DD date 55 TRUE FALSE FALSE TRUE FALSE FALSE citation
+ depositor Depositor The entity, such as a person or organization, that deposited the Dataset in the repository 1) FamilyName, GivenName or 2) Organization text 56 FALSE FALSE FALSE FALSE FALSE FALSE citation
+ dateOfDeposit Deposit Date The date when the Dataset was deposited into the repository YYYY-MM-DD date 57 FALSE FALSE FALSE TRUE FALSE FALSE citation http://purl.org/dc/terms/dateSubmitted
+ timePeriodCovered Time Period The time period that the data refer to. Also known as span. This is the time period covered by the data, not the dates of coding, collecting data, or making documents machine-readable none 58 ; FALSE FALSE TRUE FALSE FALSE FALSE citation https://schema.org/temporalCoverage
+ timePeriodCoveredStart Start Date The start date of the time period that the data refer to YYYY-MM-DD date 59 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation
+ timePeriodCoveredEnd End Date The end date of the time period that the data refer to YYYY-MM-DD date 60 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation
+ dateOfCollection Date of Collection The dates when the data were collected or generated none 61 ; FALSE FALSE TRUE FALSE FALSE FALSE citation
+ dateOfCollectionStart Start Date The date when the data collection started YYYY-MM-DD date 62 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation
+ dateOfCollectionEnd End Date The date when the data collection ended YYYY-MM-DD date 63 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation
+ kindOfData Data Type The type of data included in the files (e.g. survey data, clinical data, or machine-readable text) text 64 TRUE FALSE TRUE TRUE FALSE FALSE citation http://rdf-vocabulary.ddialliance.org/discovery#kindOfData
+ series Series Information about the dataset series to which the Dataset belong none 65 : FALSE FALSE TRUE FALSE FALSE FALSE citation
+ seriesName Name The name of the dataset series text 66 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE series citation
+ seriesInformation Information Can include 1) a history of the series and 2) a summary of features that apply to the series textbox 67 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE series citation
+ software Software Information about the software used to generate the Dataset none 68 , FALSE FALSE TRUE FALSE FALSE FALSE citation https://www.w3.org/TR/prov-o/#wasGeneratedBy
+ softwareName Name The name of software used to generate the Dataset text 69 #VALUE FALSE TRUE FALSE FALSE FALSE FALSE software citation
+ softwareVersion Version The version of the software used to generate the Dataset, e.g. 4.11 text 70 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE software citation
+ relatedMaterial Related Material Information, such as a persistent ID or citation, about the material related to the Dataset, such as appendices or sampling information available outside of the Dataset textbox 71 FALSE FALSE TRUE FALSE FALSE FALSE citation
+ relatedDatasets Related Dataset Information, such as a persistent ID or citation, about a related dataset, such as previous research on the Dataset's subject textbox 72 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/relation
+ otherReferences Other Reference Information, such as a persistent ID or citation, about another type of resource that provides background or supporting material to the Dataset text 73 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/references
+ dataSources Data Source Information, such as a persistent ID or citation, about sources of the Dataset (e.g. a book, article, serial, or machine-readable data file) textbox 74 FALSE FALSE TRUE FALSE FALSE FALSE citation https://www.w3.org/TR/prov-o/#wasDerivedFrom
+ originOfSources Origin of Historical Sources For historical sources, the origin and any rules followed in establishing them as sources textbox 75 FALSE FALSE FALSE FALSE FALSE FALSE citation
+ characteristicOfSources Characteristic of Sources Characteristics not already noted elsewhere textbox 76 FALSE FALSE FALSE FALSE FALSE FALSE citation
+ accessToSources Documentation and Access to Sources 1) Methods or procedures for accessing data sources and 2) any special permissions needed for access textbox 77 FALSE FALSE FALSE FALSE FALSE FALSE citation
+#controlledVocabulary DatasetField Value identifier displayOrder
+ subject Agricultural Sciences D01 0
+ subject Arts and Humanities D0 1
+ subject Astronomy and Astrophysics D1 2
+ subject Business and Management D2 3
+ subject Chemistry D3 4
+ subject Computer and Information Science D7 5
+ subject Earth and Environmental Sciences D4 6
+ subject Engineering D5 7
+ subject Law D8 8
+ subject Mathematical Sciences D9 9
+ subject Medicine, Health and Life Sciences D6 10
+ subject Physics D10 11
+ subject Social Sciences D11 12
+ subject Other D12 13
+ publicationIDType ark 0
+ publicationIDType arXiv 1
+ publicationIDType bibcode 2
+ publicationIDType cstr 3
+ publicationIDType doi 4
+ publicationIDType ean13 5
+ publicationIDType eissn 6
+ publicationIDType handle 7
+ publicationIDType isbn 8
+ publicationIDType issn 9
+ publicationIDType istc 10
+ publicationIDType lissn 11
+ publicationIDType lsid 12
+ publicationIDType pmid 13
+ publicationIDType purl 14
+ publicationIDType upc 15
+ publicationIDType url 16
+ publicationIDType urn 17
+ publicationIDType DASH-NRS 18
+ contributorType Data Collector 0
+ contributorType Data Curator 1
+ contributorType Data Manager 2
+ contributorType Editor 3
+ contributorType Funder 4
+ contributorType Hosting Institution 5
+ contributorType Project Leader 6
+ contributorType Project Manager 7
+ contributorType Project Member 8
+ contributorType Related Person 9
+ contributorType Researcher 10
+ contributorType Research Group 11
+ contributorType Rights Holder 12
+ contributorType Sponsor 13
+ contributorType Supervisor 14
+ contributorType Work Package Leader 15
+ contributorType Other 16
+ authorIdentifierScheme ORCID 0
+ authorIdentifierScheme ISNI 1
+ authorIdentifierScheme LCNA 2
+ authorIdentifierScheme VIAF 3
+ authorIdentifierScheme GND 4
+ authorIdentifierScheme DAI 5
+ authorIdentifierScheme ResearcherID 6
+ authorIdentifierScheme ScopusID 7
+ language Abkhaz 0
+ language Afar 1 aar aa
+ language Afrikaans 2 afr af
+ language Akan 3 aka ak
+ language Albanian 4 sqi alb sq
+ language Amharic 5 amh am
+ language Arabic 6 ara ar
+ language Aragonese 7 arg an
+ language Armenian 8 hye arm hy
+ language Assamese 9 asm as
+ language Avaric 10 ava av
+ language Avestan 11 ave ae
+ language Aymara 12 aym ay
+ language Azerbaijani 13 aze az
+ language Bambara 14 bam bm
+ language Bashkir 15 bak ba
+ language Basque 16 eus baq eu
+ language Belarusian 17 bel be
+ language Bengali, Bangla 18 ben bn
+ language Bihari 19 bih bh
+ language Bislama 20 bis bi
+ language Bosnian 21 bos bs
+ language Breton 22 bre br
+ language Bulgarian 23 bul bg
+ language Burmese 24 mya bur my
+ language Catalan,Valencian 25 cat ca
+ language Chamorro 26 cha ch
+ language Chechen 27 che ce
+ language Chichewa, Chewa, Nyanja 28 nya ny
+ language Chinese 29 zho chi zh
+ language Chuvash 30 chv cv
+ language Cornish 31 cor kw
+ language Corsican 32 cos co
+ language Cree 33 cre cr
+ language Croatian 34 hrv src hr
+ language Czech 35 ces cze cs
+ language Danish 36 dan da
+ language Divehi, Dhivehi, Maldivian 37 div dv
+ language Dutch 38 nld dut nl
+ language Dzongkha 39 dzo dz
+ language English 40 eng en
+ language Esperanto 41 epo eo
+ language Estonian 42 est et
+ language Ewe 43 ewe ee
+ language Faroese 44 fao fo
+ language Fijian 45 fij fj
+ language Finnish 46 fin fi
+ language French 47 fra fre fr
+ language Fula, Fulah, Pulaar, Pular 48 ful ff
+ language Galician 49 glg gl
+ language Georgian 50 kat geo ka
+ language German 51 deu ger de
+ language Greek (modern) 52 gre ell el
+ language Guaraní 53 grn gn
+ language Gujarati 54 guj gu
+ language Haitian, Haitian Creole 55 hat ht
+ language Hausa 56 hau ha
+ language Hebrew (modern) 57 heb he
+ language Herero 58 her hz
+ language Hindi 59 hin hi
+ language Hiri Motu 60 hmo ho
+ language Hungarian 61 hun hu
+ language Interlingua 62 ina ia
+ language Indonesian 63 ind id
+ language Interlingue 64 ile ie
+ language Irish 65 gle ga
+ language Igbo 66 ibo ig
+ language Inupiaq 67 ipk ik
+ language Ido 68 ido io
+ language Icelandic 69 isl ice is
+ language Italian 70 ita it
+ language Inuktitut 71 iku iu
+ language Japanese 72 jpn ja
+ language Javanese 73 jav jv
+ language Kalaallisut, Greenlandic 74 kal kl
+ language Kannada 75 kan kn
+ language Kanuri 76 kau kr
+ language Kashmiri 77 kas ks
+ language Kazakh 78 kaz kk
+ language Khmer 79 khm km
+ language Kikuyu, Gikuyu 80 kik ki
+ language Kinyarwanda 81 kin rw
+ language Kyrgyz 82
+ language Komi 83 kom kv
+ language Kongo 84 kon kg
+ language Korean 85 kor ko
+ language Kurdish 86 kur ku
+ language Kwanyama, Kuanyama 87 kua kj
+ language Latin 88 lat la
+ language Luxembourgish, Letzeburgesch 89 ltz lb
+ language Ganda 90 lug lg
+ language Limburgish, Limburgan, Limburger 91 lim li
+ language Lingala 92 lin ln
+ language Lao 93 lao lo
+ language Lithuanian 94 lit lt
+ language Luba-Katanga 95 lub lu
+ language Latvian 96 lav lv
+ language Manx 97 glv gv
+ language Macedonian 98 mkd mac mk
+ language Malagasy 99 mlg mg
+ language Malay 100 may msa ms
+ language Malayalam 101 mal ml
+ language Maltese 102 mlt mt
+ language Māori 103 mao mri mi
+ language Marathi (Marāṭhī) 104 mar mr
+ language Marshallese 105 mah mh
+ language Mixtepec Mixtec 106 mix
+ language Mongolian 107 mon mn
+ language Nauru 108 nau na
+ language Navajo, Navaho 109 nav nv
+ language Northern Ndebele 110 nde nd
+ language Nepali 111 nep ne
+ language Ndonga 112 ndo ng
+ language Norwegian Bokmål 113 nob nb
+ language Norwegian Nynorsk 114 nno nn
+ language Norwegian 115 nor no
+ language Nuosu 116
+ language Southern Ndebele 117 nbl nr
+ language Occitan 118 oci oc
+ language Ojibwe, Ojibwa 119 oji oj
+ language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu
+ language Oromo 121 orm om
+ language Oriya 122 ori or
+ language Ossetian, Ossetic 123 oss os
+ language Panjabi, Punjabi 124 pan pa
+ language Pāli 125 pli pi
+ language Persian (Farsi) 126 per fas fa
+ language Polish 127 pol pl
+ language Pashto, Pushto 128 pus ps
+ language Portuguese 129 por pt
+ language Quechua 130 que qu
+ language Romansh 131 roh rm
+ language Kirundi 132 run rn
+ language Romanian 133 ron rum ro
+ language Russian 134 rus ru
+ language Sanskrit (Saṁskṛta) 135 san sa
+ language Sardinian 136 srd sc
+ language Sindhi 137 snd sd
+ language Northern Sami 138 sme se
+ language Samoan 139 smo sm
+ language Sango 140 sag sg
+ language Serbian 141 srp scc sr
+ language Scottish Gaelic, Gaelic 142 gla gd
+ language Shona 143 sna sn
+ language Sinhala, Sinhalese 144 sin si
+ language Slovak 145 slk slo sk
+ language Slovene 146 slv sl
+ language Somali 147 som so
+ language Southern Sotho 148 sot st
+ language Spanish, Castilian 149 spa es
+ language Sundanese 150 sun su
+ language Swahili 151 swa sw
+ language Swati 152 ssw ss
+ language Swedish 153 swe sv
+ language Tamil 154 tam ta
+ language Telugu 155 tel te
+ language Tajik 156 tgk tg
+ language Thai 157 tha th
+ language Tigrinya 158 tir ti
+ language Tibetan Standard, Tibetan, Central 159 tib bod bo
+ language Turkmen 160 tuk tk
+ language Tagalog 161 tgl tl
+ language Tswana 162 tsn tn
+ language Tonga (Tonga Islands) 163 ton to
+ language Turkish 164 tur tr
+ language Tsonga 165 tso ts
+ language Tatar 166 tat tt
+ language Twi 167 twi tw
+ language Tahitian 168 tah ty
+ language Uyghur, Uighur 169 uig ug
+ language Ukrainian 170 ukr uk
+ language Urdu 171 urd ur
+ language Uzbek 172 uzb uz
+ language Venda 173 ven ve
+ language Vietnamese 174 vie vi
+ language Volapük 175 vol vo
+ language Walloon 176 wln wa
+ language Welsh 177 cym wel cy
+ language Wolof 178 wol wo
+ language Western Frisian 179 fry fy
+ language Xhosa 180 xho xh
+ language Yiddish 181 yid yi
+ language Yoruba 182 yor yo
+ language Zhuang, Chuang 183 zha za
+ language Zulu 184 zul zu
+ language Not applicable 185
diff --git a/distros/dataverse.no/configs/http-ssl.conf b/distros/dataverse.no/configs/http-ssl.conf
index fc5b0ac..79a2eb8 100755
--- a/distros/dataverse.no/configs/http-ssl.conf
+++ b/distros/dataverse.no/configs/http-ssl.conf
@@ -2,7 +2,7 @@
# When we also provide SSL we have to listen to the
# the HTTPS port in addition.
#
-Listen 9443 https
+Listen 443 https
TimeOut 600
LimitRequestBody 0
@@ -56,14 +56,16 @@ SSLCryptoDevice builtin
##
#
- ServerName test-docker.dataverse.no
+ ServerName test-docker-dataverse.azure.uit.no
+# ServerName default
DocumentRoot /var/www/html
- #ErrorLog /var/logs/http-error_log
- #CustomLog /var/logs/http-access_log combined env=!monitor
+# ErrorLog /var/logs/http-error_log
+# CustomLog /var/logs/http-access_log combined env=!monitor
+
+ #Header always set X-Frame-Options "SAMEORIGIN"
+ #Header always set X-XSS-Protection "1; mode=block"
+ #Header always set X-Content-Type-Options "nosniff"
- Header always set X-Frame-Options "SAMEORIGIN"
- Header always set X-XSS-Protection "1; mode=block"
- Header always set X-Content-Type-Options "nosniff"
Options None
@@ -76,21 +78,22 @@ SSLCryptoDevice builtin
-
+
# General setup for the virtual host, inherited from global configuration
#DocumentRoot "/var/www/html"
#ServerName www.example.com:443
-ServerName test-docker.dataverse.no
+ServerName test-docker-dataverse.azure.uit.no
+#ServerName default
Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains"
# Content-Security-Policy: some Java files load over http, so this header
# cannot be used.
#Header always set Content-Security-Policy "default-src https:"
-Header always set X-Frame-Options "SAMEORIGIN"
-Header always set X-XSS-Protection "1; mode=block"
-Header always set X-Content-Type-Options "nosniff"
+#Header always set X-Frame-Options "SAMEORIGIN"
+#Header always set X-XSS-Protection "1; mode=block"
+#Header always set X-Content-Type-Options "nosniff"
#:443
# Use separate log files for the SSL virtual host; note that LogLevel
@@ -116,7 +119,7 @@ SSLEngine on
# SSL Protocol support:
# List the enable protocol levels with which clients will be able to
# connect. Disable SSLv2 access by default:
-SSLProtocol +all +TLSv1.3 +TLSv1.2 -SSLv2 -SSLv3 -TLSv1 -TLSv1.1
+SSLProtocol +TLSv1.3 +TLSv1.2
SSLCipherSuite ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256
SSLHonorCipherOrder on
SSLCompression off
@@ -203,7 +206,7 @@ ShibCompatValidUser Off
AuthType shibboleth
ShibRequestSetting requireSession 1
require valid-user
-
+
@@ -231,16 +234,27 @@ ShibCompatValidUser Off
ShibRequestSetting requireSession 1
require shib-session
+
+
+ SSLProxyVerify none
+ SSLProxyCheckPeerCN off
+ SSLProxyCheckPeerName off
+ SSLProxyCheckPeerExpire off
+
ProxyPassInterpolateEnv On
ProxyPassMatch ^/Shibboleth.sso !
ProxyPassMatch ^/shibboleth-ds !
ProxyPassMatch ^/phpPgAdmin !
ProxyPassMatch ^/nav !
ProxyPassMatch ^/minio !
-ProxyPass / ajp://dataverse:8009/ interpolate
-ProxyPassReverse / ajp://dataverse:8009/ interpolate
-ProxyPassReverseCookieDomain "dataverse" "test.dataverse.no" interpolate
-ProxyPassReverseCookiePath "/" "/" interpolate
+ProxyPass "/" "ajp://dataverse:8009/" timeout=600
+ProxyPassReverse "/" "ajp://dataverse:8009/" timeout=600
+#ProxyPass "/" "ajp://dataverse:8009/" interpolate
+#ProxyPassReverse "/" "ajp://dataverse:8009/" interpolate
+#ProxyPass / http://dataverse:8080/ interpolate
+#ProxyPassReverse / http://dataverse:8080/ interpolate
+ProxyPassReverseCookieDomain "dataverse" "test-docker-dataverse.azure.uit.no"
+ProxyPassReverseCookiePath "/" "/"
#AuthType Basic
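
The hunk above replaces the interpolated AJP ProxyPass with an explicit one carrying a 600 s timeout and relaxes SSL peer checks for proxied backends. A minimal sanity check, assuming the Apache/Shibboleth container is named "shibboleth" as in docker-compose.yaml and that Dataverse answers on AJP port 8009:

    # Validate the rewritten http-ssl.conf and confirm the proxied app answers.
    # apachectl may be called apache2ctl on Debian-based images.
    docker exec shibboleth apachectl configtest        # expect "Syntax OK"
    docker exec shibboleth apachectl graceful          # reload without dropping connections
    curl -skI https://localhost/ | head -n 1           # Dataverse should respond through the AJP proxy
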
diff --git a/distros/dataverse.no/configs/schema.xml b/distros/dataverse.no/configs/schema.xml
index 1718962..5fe31aa 100644
--- a/distros/dataverse.no/configs/schema.xml
+++ b/distros/dataverse.no/configs/schema.xml
@@ -359,9 +359,9 @@
-
-
-
+
+
+
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index a49437c..9c327e4 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -2,34 +2,41 @@ version: '3.7'
# Settings and configurations that are common for all containers
services:
- reverse-proxy:
+ #reverse-proxy:
# The official v2 Traefik docker image
- image: traefik:v2.10.3
+ # image: traefik:v2.10.3
# Enables the web UI and tells Traefik to listen to docker
- container_name: traefik
- command:
- - "--api.insecure=true"
- - "--providers.docker=true"
- - "--providers.docker.exposedbydefault=false"
- - --providers.docker.network=traefik
- - "--entrypoints.web.address=:80"
- - "--entrypoints.websecure.address=:443"
- - "--entrypoints.web.http.redirections.entryPoint.to=websecure"
- - "--entrypoints.web.http.redirections.entryPoint.scheme=https"
- - "--entrypoints.web.http.redirections.entryPoint.priority=10" # disable permanent forwarding for every route
- - --providers.file.filename=/var/traefik2/certs/certificates.toml
- - --providers.file.watch=true
- hostname: ${hostname}
- networks:
- - traefik
- ports:
- - 80:80
- - 9443:443
- volumes:
- - /etc/localtime:/etc/localtime:ro
- - /var/run/docker.sock:/var/run/docker.sock:ro
- - "${CONFIGURATION_PATH}/configuration/files:/var/traefik2/certs"
- - "${CONFIGURATION_PATH}/configuration/:/configuration/"
+ # container_name: traefik
+ # command:
+ # - "--api.insecure=true"
+ # - "--providers.docker=true"
+ # - "--providers.docker.exposedbydefault=false"
+ # - "--providers.docker.network=traefik"
+ # - "--entrypoints.web.address=:80"
+ # - "--entrypoints.websecure.address=:443"
+ # - "--entrypoints.web.http.redirections.entryPoint.to=websecure"
+ # - "--entrypoints.web.http.redirections.entryPoint.scheme=https"
+ # - "--providers.file.filename=/var/traefik2/certs/certificates.toml"
+ # - "--providers.file.watch=true"
+ # - "--log.level=DEBUG"
+ # hostname: ${hostname}
+ # networks:
+ # - traefik
+ # ports:
+ # - 80:80
+ # - 9443:443
+ # - 8090:8080
+ # volumes:
+ # - /etc/localtime:/etc/localtime:ro
+ # - /var/run/docker.sock:/var/run/docker.sock:ro
+ # - "${CONFIGURATION_PATH}/configuration/files:/var/traefik2/certs"
+ # - "${CONFIGURATION_PATH}/configuration/:/configuration/"
+ # labels:
+ # - "traefik.enable=true"
+ # - "traefik.frontend.rule=Host(`${traefikhost}`)"
+ # - "traefik.port=8080"
+
+ #- "--entrypoints.web.http.redirections.entryPoint.priority=10" # disable permanent forwarding for every route
postgres:
networks:
@@ -58,12 +65,13 @@ services:
networks:
- traefik
#image: test03/shibboleth:3.3.0.B
- image: ${DOCKER_HUB}/shibboleth:3.4.1
+ image: ${DOCKER_HUB}/shibboleth:3.4.1-1
container_name: shibboleth
+ hostname: shibboleth
privileged: true
ports:
- "8089:80"
- - "443:9443"
+ - "443:443"
volumes:
- ${LOGS_PATH}/shibboleth/httpd:/var/log/httpd
# - ${LOGS_PATH}/shibboleth/shibboleth:/var/log/shibboleth
@@ -75,11 +83,17 @@ services:
# hostname: ${hostname}
labels:
- "traefik.enable=true"
- - traefik.http.routers.shibboleth-web.rule=Host(`${traefikhost}`) && PathPrefix(`/Shibboleth.sso`)
- - traefik.http.routers.shibboleth-web-secure.rule=Host(`${traefikhost}`) && PathPrefix(`/Shibboleth.sso`)
- - traefik.http.routers.shibboleth-web-secure.tls=true
- - traefik.http.routers.shibboleth-web-secure.entrypoints=websecure
- - "traefik.http.services.shibboleth-web-secure.loadbalancer.server.port=9443"
+ - "traefik.http.routers.shibboleth.rule=PathPrefix(`/Shibboleth.sso`, `/shibboleth-sp` )"
+ #- "traefik.tcp.routers.shibboleth.rule=Host(`${traefikhost}`) && PathPrefix(`/Shibboleth.sso`, `/shibboleth-sp` )"
+ - "traefik.http.routers.shibboleth.tls=true"
+ #- "traefik.http.services.shibboleth.loadbalancer.passhostheader=true"
+ - "traefik.http.services.shibboleth.loadbalancer.server.port=80"
+ #- "traefik.tcp.services.shibboleth.loadbalancer.server.port=80"
+ #- "traefik.http.services.shibboleth.loadbalancer.server.scheme=http"
+ #- "traefik.http.services.shibboleth.loadbalancer.server.url=${traefikhost}"
+ #- "traefik.http.middlewares.shibboleth.forwardauth.trustForwardHeader=true"
+ depends_on:
+ - dataverse
solr:
networks:
@@ -104,19 +118,19 @@ services:
- "traefik.http.routers.solr.rule=Host(`solr.${traefikhost}`)"
- "traefik.http.services.solr.loadbalancer.server.port=8983"
- "traefik.http.routers.solr.tls=true"
- - "traefik.http.routers.solr.tls.certresolver=myresolver"
+ #- "traefik.http.routers.solr.tls.certresolver=myresolver"
- whoami:
- networks:
- - traefik
- image: "traefik/whoami"
- container_name: "whoami"
- labels:
- - "traefik.enable=true"
- # - "traefik.http.routers.whoami.entrypoints=web"
- - "traefik.http.routers.whoami.rule=Host(`${traefikhost}`) && PathPrefix(`/whoami`)"
- - "traefik.http.routers.whoami.tls=true"
- - "traefik.http.routers.whoami.tls.certresolver=myresolver"
+# whoami:
+# networks:
+# - traefik
+# image: "traefik/whoami"
+# container_name: "whoami"
+# labels:
+# - "traefik.enable=true"
+# # - "traefik.http.routers.whoami.entrypoints=web"
+# - "traefik.http.routers.whoami.rule=Host(`${traefikhost}`) && PathPrefix(`/whoami`)"
+# - "traefik.http.routers.whoami.tls=true"
+# - "traefik.http.routers.whoami.tls.certresolver=myresolver"
dataverse:
networks:
@@ -124,17 +138,24 @@ services:
image: ${DOCKER_HUB}/dataverse:${VERSION}
#image: coronawhy/dataverse:${VERSION}
container_name: dataverse
+ hostname: dataverse
privileged: true
user:
"root"
ports:
#- "443:443"
- - "4849:4848"
- - "8088:8088"
- - "8080:8080"
- - "8099:8009"
- extra_hosts:
- - "${traefikhost}:51.105.181.173"
+ #- "4849:4848"
+ #- "8088:8088"
+ #- "8080:8080"
+ #- "8099:8009"
+ - "8080:8080" # HTTP (Dataverse Application)
+ - "8181:8181" # HTTPS (Dataverse Application)
+ - "4949:4848" # HTTPS (Payara Admin Console)
+ - "8009:8009" # AJP
+ - "9009:9009" # JDWP
+ - "8686:8686" # JMX
+# extra_hosts:
+# - "${traefikhost}:51.105.181.173"
environment:
- "CVM_SERVER_NAME=CESSDA" #Optional
- "CESSDA"
@@ -173,11 +194,11 @@ services:
- "SOLR_SERVICE_HOST"
- "SOLR_SERVICE_PORT"
- "CVM_SERVER_URL=https://ns.${traefikhost}"
- - "CVM_TSV_SOURCE=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cvocdemo.tsv"
- - "1WAR_FILE=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/dataverse-5.3-cv.war"
- - "WAR_FILE=https://github.com/IQSS/dataverse/releases/download/v5.3/dataverse-5.3.war"
- - "CVM_SQL=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cv-update.sql"
- - "CVM_CONFIG=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cvoc-conf.json"
+ #- "CVM_TSV_SOURCE=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cvocdemo.tsv"
+ #- "1WAR_FILE=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/dataverse-5.3-cv.war"
+ #- "WAR_FILE=https://github.com/IQSS/dataverse/releases/download/v5.3/dataverse-5.3.war"
+ #- "CVM_SQL=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cv-update.sql"
+ #- "CVM_CONFIG=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cvoc-conf.json"
- "LANG=en"
- "MAINLANG"
- "cvManager=http://"
@@ -207,6 +228,7 @@ services:
- "CONFIG_FILE"
- "PAYARA_ARGS=--debug"
- "aws_config"
+ - "PREVIEWER"
depends_on:
- postgres
- solr
@@ -224,20 +246,22 @@ services:
- /mnt:/mnt
labels:
- "traefik.enable=true"
- - "traefik.http.routers.dataverse.rule=Host(`${traefikhost}`)"
+ #- "traefik.http.routers.dataverse.rule=Host(`${traefikhost}`)"
+ - "traefik.http.routers.dataverse.rule=PathPrefix(`/`)"
- "traefik.http.services.dataverse.loadbalancer.server.port=8080"
- "traefik.http.routers.dataverse.tls=true"
- - "traefik.http.routers.dataverse.tls.certresolver=myresolver"
+ #- "traefik.http.routers.dataverse.middlewares=shibboleth"
+ #- "traefik.http.routers.dataverse.tls.certresolver=myresolver"
volumes:
solr-data:
- data1-1:
- data1-2:
- data2-1:
- data2-2:
- data3-1:
- data3-2:
- data4-1:
- data4-2:
+# data1-1:
+# data1-2:
+# data2-1:
+# data2-2:
+# data3-1:
+# data3-2:
+# data4-1:
+# data4-2:
networks:
traefik:
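
With traefik and whoami commented out and the named data volumes dropped, the compose file now runs only postgres, solr, dataverse and shibboleth. A quick smoke test of the revised file might look like this (service names are taken from docker-compose.yaml above):

    docker-compose config -q                                  # validate YAML and variable substitution
    docker-compose up -d postgres solr dataverse shibboleth   # bring up the remaining services
    docker-compose ps                                         # shibboleth should now publish 443:443
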
diff --git a/distros/dataverse.no/init.d/010-mailrelay-set.sh b/distros/dataverse.no/init.d/010-mailrelay-set.sh
index 4e6ddb9..6b7dc81 100755
--- a/distros/dataverse.no/init.d/010-mailrelay-set.sh
+++ b/distros/dataverse.no/init.d/010-mailrelay-set.sh
@@ -7,3 +7,8 @@ if [ "${system_email}" ]; then
asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} delete-javamail-resource mail/notifyMailSession
asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-javamail-resource --mailhost ${mailhost} --mailuser ${mailuser} --fromaddress ${no_reply_email} --property mail.smtp.auth=false:mail.smtp.password=${smtp_password}:mail.smtp.port=${smtp_port}:mail.smtp.socketFactory.port=${socket_port}:mail.smtp.socketFactory.fallback=false mail/notifyMailSession
fi
+
+
+if [ "${support_email}" ]; then
+ asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.mail.support-email\=${support_email}"
+fi
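
The new block registers dataverse.mail.support-email as a JVM option whenever support_email is set. A hedged way to confirm the option was created, reusing the same variables as the script:

    # Assumes the same ADMIN_USER / PASSWORD_FILE environment as 010-mailrelay-set.sh
    asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} list-jvm-options | grep support-email
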
diff --git a/distros/dataverse.no/init.d/201-bundle.sh b/distros/dataverse.no/init.d/201-bundle.sh
index 8e30c1b..8c8e46a 100755
--- a/distros/dataverse.no/init.d/201-bundle.sh
+++ b/distros/dataverse.no/init.d/201-bundle.sh
@@ -1,7 +1,7 @@
#!/bin/bash
/usr/bin/apt-get install patch -y
-/usr/bin/curl -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch
+/usr/bin/curl -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no-dev/distros/dataverse.no/modification/Bundle.properties.patch
/usr/bin/patch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/propertyFiles/Bundle.properties $DOCROOT_DIR/Bundle.properties.patch
diff --git a/distros/dataverse.no/init.d/preboot.payara b/distros/dataverse.no/init.d/preboot.payara
new file mode 100644
index 0000000..46b15cf
--- /dev/null
+++ b/distros/dataverse.no/init.d/preboot.payara
@@ -0,0 +1,23 @@
+
+create-system-properties dataverse.files.storage-driver-id=S3
+create-system-properties dataverse.files.S3.type=s3
+create-system-properties dataverse.files.S3.label=S3
+create-system-properties dataverse.files.S3.bucket-name=2002-green-dataversenotest1
+create-system-properties dataverse.files.S3.download-redirect=true
+create-system-properties dataverse.files.S3.upload-redirect=true
+create-system-properties dataverse.files.S3.url-expiration-minutes=120
+create-system-properties dataverse.files.S3.connection-pool-size=4096
+create-system-properties dataverse.files.S3.profile=cloudian
+set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logStandardStreams=true
+set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logtoFile=true
+create-system-properties dataverse.files.S3.access-key=00c8607247f4a406d1c3
+create-system-properties dataverse.files.S3.secret-key=8PXJI5iRqpOcbwLDdS0gNW2Nf0tJJoUVMCi2oXcj
+create-system-properties dataverse.files.S3.custom-endpoint-url=https\:\/\/s3-oslo.educloud.no
+create-system-properties dataverse.files.uploads=/opt/payara/appserver/glassfish/domains/domain1/uploads
+create-system-properties dataverse.siteUrl=https\:\/\/test-docker-dataverse.azure.uit.no
+create-system-properties dataverse.files.directory=/tmp/dataverse
+create-system-properties dataverse.rserve.port=6311
+create-system-properties dataverse.rserve.user=rserve
+create-system-properties dataverse.rserve.password=rserve
+create-system-properties dataverse.rserve.tempdir=/tmp/Rserv
+create-system-properties dataverse.pid.handlenet.index=300
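
preboot.payara is a plain list of asadmin commands, not a shell script. A sketch of how such a file is typically fed to Payara before deployment; the file path is an assumption (the real location is wherever INIT_SCRIPTS_FOLDER points):

    # Sketch only: run the pre-boot commands while starting domain1
    asadmin start-domain --prebootcommandfile /opt/payara/init.d/preboot.payara domain1
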
diff --git a/restart-dataverse.sh b/restart-dataverse.sh
old mode 100644
new mode 100755
From 0c8a114a628d22808831c7d734fd253f031654b2 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Wed, 13 Mar 2024 12:35:50 +0100
Subject: [PATCH 299/354] add distros/dataverse.no/init.d/preboot.payara to
.gitignore
---
.gitignore | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.gitignore b/.gitignore
index e64d14f..df6acab 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,6 +11,8 @@ dataverse.war
letsencrypt
letsencrypt/*
+distros/dataverse.no/init.d/preboot.payara
+
# Created by https://www.toptal.com/developers/gitignore/api/visualstudio,visualstudiocode,java
# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudio,visualstudiocode,java
From 97cc66bdbf430e698dd2556091dd5db6fa84a72b Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Wed, 13 Mar 2024 12:40:00 +0100
Subject: [PATCH 300/354] add correct DataCite settings to 0000-preboot
---
distros/dataverse.no/init.d/0000-preboot.sh | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/distros/dataverse.no/init.d/0000-preboot.sh b/distros/dataverse.no/init.d/0000-preboot.sh
index 10d4069..18ae3cc 100755
--- a/distros/dataverse.no/init.d/0000-preboot.sh
+++ b/distros/dataverse.no/init.d/0000-preboot.sh
@@ -25,10 +25,10 @@ echo "create-system-properties dataverse.rserve.port=6311" >> ${INIT_SCRIPTS_FOL
echo "create-system-properties dataverse.rserve.user=rserve" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.rserve.password=rserve" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.rserve.tempdir=/tmp/Rserv" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
-#echo "create-system-properties dataverse.pid.datacite.mds-api-url=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
-#echo "create-system-properties dataverse.pid.datacite.rest-api-url=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
-#echo "create-system-properties dataverse.pid.datacite.username=${doi_username}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
-#echo "create-system-properties dataverse.pid.datacite.password=${doi_password}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.pid.datacite.mds-api-url=${baseurlstring}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.pid.datacite.rest-api-url=${dataciterestapiurlstring}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.pid.datacite.username=${doi_username}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.pid.datacite.password=${doi_password}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
#echo "create-system-properties dataverse.pid.handlenet.key.path=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
#echo "create-system-properties dataverse.pid.handlenet.key.passphrase=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
echo "create-system-properties dataverse.pid.handlenet.index=300" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
From b84e9a51c15c458c6c001781749146538278c9cd Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Wed, 13 Mar 2024 12:50:04 +0100
Subject: [PATCH 301/354] remove exposed ports from dataverse service
---
distros/dataverse.no/docker-compose.yaml | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 9c327e4..1ffbae2 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -142,18 +142,18 @@ services:
privileged: true
user:
"root"
- ports:
+ #ports:
#- "443:443"
#- "4849:4848"
#- "8088:8088"
#- "8080:8080"
#- "8099:8009"
- - "8080:8080" # HTTP (Dataverse Application)
- - "8181:8181" # HTTPS (Dataverse Application)
- - "4949:4848" # HTTPS (Payara Admin Console)
- - "8009:8009" # AJP
- - "9009:9009" # JDWP
- - "8686:8686" # JMX
+ #- "8080:8080" # HTTP (Dataverse Application)
+ #- "8181:8181" # HTTPS (Dataverse Application)
+ #- "4949:4848" # HTTPS (Payara Admin Console)
+ #- "8009:8009" # AJP
+ #- "9009:9009" # JDWP
+ #- "8686:8686" # JMX
# extra_hosts:
# - "${traefikhost}:51.105.181.173"
environment:
From 1668ee53868d8939a67ea6419344633dc9812eb7 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 14 Mar 2024 16:14:56 +0100
Subject: [PATCH 302/354] add gitignore vim
---
.gitignore | 28 ++++++++++++++++++++++++++++
1 file changed, 28 insertions(+)
diff --git a/.gitignore b/.gitignore
index df6acab..6be1169 100644
--- a/.gitignore
+++ b/.gitignore
@@ -459,3 +459,31 @@ replay_pid*
# End of https://www.toptal.com/developers/gitignore/api/visualstudio,visualstudiocode,java
+# Created by https://www.toptal.com/developers/gitignore/api/vim
+# Edit at https://www.toptal.com/developers/gitignore?templates=vim
+
+### Vim ###
+# Swap
+[._]*.s[a-v][a-z]
+!*.svg # comment out if you don't need vector files
+[._]*.sw[a-p]
+[._]s[a-rt-v][a-z]
+[._]ss[a-gi-z]
+[._]sw[a-p]
+
+# Session
+Session.vim
+Sessionx.vim
+
+# Temporary
+.netrwhist
+*~
+# Auto-generated tag files
+tags
+# Persistent undo
+[._]*.un~
+
+# End of https://www.toptal.com/developers/gitignore/api/vim
+
+preboot.payara
+distros/dataverse.no/init.d/preboot.payara
From a1e4d456a1a6da621f3efc48448b1069b33dbd2c Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 14 Mar 2024 16:24:57 +0100
Subject: [PATCH 303/354] change payara file, add gitignore entry
---
.gitignore | 1 -
distros/dataverse.no/init.d/preboot.payara | 23 ----------------------
2 files changed, 24 deletions(-)
delete mode 100644 distros/dataverse.no/init.d/preboot.payara
diff --git a/.gitignore b/.gitignore
index 6be1169..f6fb909 100644
--- a/.gitignore
+++ b/.gitignore
@@ -485,5 +485,4 @@ tags
# End of https://www.toptal.com/developers/gitignore/api/vim
-preboot.payara
distros/dataverse.no/init.d/preboot.payara
diff --git a/distros/dataverse.no/init.d/preboot.payara b/distros/dataverse.no/init.d/preboot.payara
deleted file mode 100644
index 46b15cf..0000000
--- a/distros/dataverse.no/init.d/preboot.payara
+++ /dev/null
@@ -1,23 +0,0 @@
-
-create-system-properties dataverse.files.storage-driver-id=S3
-create-system-properties dataverse.files.S3.type=s3
-create-system-properties dataverse.files.S3.label=S3
-create-system-properties dataverse.files.S3.bucket-name=2002-green-dataversenotest1
-create-system-properties dataverse.files.S3.download-redirect=true
-create-system-properties dataverse.files.S3.upload-redirect=true
-create-system-properties dataverse.files.S3.url-expiration-minutes=120
-create-system-properties dataverse.files.S3.connection-pool-size=4096
-create-system-properties dataverse.files.S3.profile=cloudian
-set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logStandardStreams=true
-set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logtoFile=true
-create-system-properties dataverse.files.S3.access-key=00c8607247f4a406d1c3
-create-system-properties dataverse.files.S3.secret-key=8PXJI5iRqpOcbwLDdS0gNW2Nf0tJJoUVMCi2oXcj
-create-system-properties dataverse.files.S3.custom-endpoint-url=https\:\/\/s3-oslo.educloud.no
-create-system-properties dataverse.files.uploads=/opt/payara/appserver/glassfish/domains/domain1/uploads
-create-system-properties dataverse.siteUrl=https\:\/\/test-docker-dataverse.azure.uit.no
-create-system-properties dataverse.files.directory=/tmp/dataverse
-create-system-properties dataverse.rserve.port=6311
-create-system-properties dataverse.rserve.user=rserve
-create-system-properties dataverse.rserve.password=rserve
-create-system-properties dataverse.rserve.tempdir=/tmp/Rserv
-create-system-properties dataverse.pid.handlenet.index=300
From 1d3ea4f88f2d2d05497607b7edf47e23de6939ff Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 14 Mar 2024 17:45:18 +0100
Subject: [PATCH 304/354] check ETag and update script
---
distros/dataverse.no/init.d/100-analytics.sh | 2 +-
distros/dataverse.no/init.d/201-bundle.sh | 4 +---
.../dataverse.no/init.d/cronjob/backupData.sh | 19 +++++++++++----
.../init.d/cronjob/checkETagByFiles.sh | 24 +++++++++++++++++++
.../init.d/cronjob/checkfiles.sql | 3 +++
.../init.d/cronjob/dumpdatabase.sh | 19 ++++++++++++---
distros/dataverse.no/runOnce/update_5.14.sh | 8 +++++++
7 files changed, 67 insertions(+), 12 deletions(-)
create mode 100644 distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
create mode 100644 distros/dataverse.no/init.d/cronjob/checkfiles.sql
create mode 100644 distros/dataverse.no/runOnce/update_5.14.sh
diff --git a/distros/dataverse.no/init.d/100-analytics.sh b/distros/dataverse.no/init.d/100-analytics.sh
index 3c9c536..bede100 100755
--- a/distros/dataverse.no/init.d/100-analytics.sh
+++ b/distros/dataverse.no/init.d/100-analytics.sh
@@ -1,4 +1,4 @@
-curl -o $DOCROOT_DIR/analytics.xhtml https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/analytics.xhtml
+curl -z $DOCROOT_DIR/analytics.xhtml -o $DOCROOT_DIR/analytics.xhtml https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/analytics.xhtml
if [ ! -z "$WEBANALYTICSON" ]
then
diff --git a/distros/dataverse.no/init.d/201-bundle.sh b/distros/dataverse.no/init.d/201-bundle.sh
index 8c8e46a..7f0c01c 100755
--- a/distros/dataverse.no/init.d/201-bundle.sh
+++ b/distros/dataverse.no/init.d/201-bundle.sh
@@ -1,7 +1,5 @@
#!/bin/bash
/usr/bin/apt-get install patch -y
-/usr/bin/curl -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no-dev/distros/dataverse.no/modification/Bundle.properties.patch
+/usr/bin/curl -z $DOCROOT_DIR/Bundle.properties.patch -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch
/usr/bin/patch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/propertyFiles/Bundle.properties $DOCROOT_DIR/Bundle.properties.patch
-
-
diff --git a/distros/dataverse.no/init.d/cronjob/backupData.sh b/distros/dataverse.no/init.d/cronjob/backupData.sh
index 90b4aca..e150ae3 100755
--- a/distros/dataverse.no/init.d/cronjob/backupData.sh
+++ b/distros/dataverse.no/init.d/cronjob/backupData.sh
@@ -2,13 +2,20 @@
export PGPASSWORD=`cat /secrets/db/password`
cp -r /secrets/aws-cli/.aws ~
+# copy Database to blob and s3
+#sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
+#wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
+#apt update
+#apt install postgresql-client-15 -y
+#pg_dump -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} | gzip > /mnt/dataverse.dump.gz
-#file=10.21337/OZ4JBV/1869225dfbd-4edecc03da9e
-
-files=`psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/backupfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2]}' | sed "s/S3:\/\/$aws_bucket_name://"`
-dump=`ls /data/databaseDumps/ -Art | tail -n 1`
+dumpName="dataverse.`date +%Y%m%d_%H%M%z`.dump.gz"
+cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/${dumpName}
+aws s3 --endpoint https://$aws_endpoint cp /mnt/dataverse.dump.gz s3://$aws_bucket_name/databaseDumps/${dumpName}
-aws s3 --endpoint https://$aws_endpoint cp /data/databaseDumps/$dump s3://$aws_bucket_name/databaseDumps
+# backup files
+#file=10.21337/OZ4JBV/1869225dfbd-4edecc03da9e
+files=`psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/backupfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2]}' | sed "s/S3:\/\/$aws_bucket_name://"`
for file in $files
do
@@ -22,5 +29,7 @@ for file in $files
done
#echo $files
+rm ~/.aws
+
#cp -r /secrets/aws-cli/.aws ~
#aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/$file /data/$file
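
The backup script now writes a timestamped dump name and pushes the dump both to the blob mount and to S3. A hypothetical restore of such a dump, reusing the variable names from the script:

    # Sketch, not part of the patch: pull a dump back from S3 and load it into the database
    aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/databaseDumps/${dumpName} /tmp/${dumpName}
    gunzip -c /tmp/${dumpName} | psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE}
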
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
new file mode 100644
index 0000000..586e231
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+export PGPASSWORD=`cat /secrets/db/password`
+cp -r /secrets/aws-cli/.aws ~
+
+psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /tmp/dataverse_checkETag.txt
+
+while read p; do
+ IFS=' ' read -a arrayData <<< "$p"
+ #echo ${arrayData[0]}
+
+ s3ETag=$(aws s3api --endpoint https://$aws_endpoint head-object --bucket $aws_bucket_name --key ${arrayData[0]} 2> /dev/null | jq .ETag | sed 's/\"//g' | sed 's/\\//g')
+
+
+ if [ -z "${s3ETag}" ]; then
+ echo "is not exist in the s3 storage: ${arrayData[0]}"
+ else
+
+ if [ "${s3ETag}" != "${arrayData[1]}" ]; then
+ echo "is not equal: ${arrayData[0]}"
+ fi
+ fi
+
+done < /tmp/dataverse_checkETag.txt
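
The new script compares the S3 ETag of each stored file with the checksum recorded in the database; note that an S3 ETag equals the object's MD5 only for single-part uploads, so multipart uploads can be reported as mismatches even when intact. A hedged way to run it by hand, mirroring how the other cronjob scripts are invoked:

    # Path inside the container mirrors backupData.sh; adjust if the image differs
    docker exec dataverse bash /opt/payara/init.d/cronjob/checkETagByFiles.sh
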
diff --git a/distros/dataverse.no/init.d/cronjob/checkfiles.sql b/distros/dataverse.no/init.d/cronjob/checkfiles.sql
new file mode 100644
index 0000000..74c3ca0
--- /dev/null
+++ b/distros/dataverse.no/init.d/cronjob/checkfiles.sql
@@ -0,0 +1,3 @@
+
+--select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '2 day');
+select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile';
\ No newline at end of file
diff --git a/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
index 925c1eb..fdee092 100755
--- a/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
+++ b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
@@ -1,6 +1,19 @@
#!/bin/bash
-docker exec --user postgres postgres pg_dump -U dataverse dataverse > /mnt/dataverse.dump
-gzip -f /mnt/dataverse.dump
-cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/dataverse.`date +%Y%m%d_%H%M%z`.dump.gz
+
+#
+#
+
+
+#docker exec --user postgres postgres pg_dump -U dataverse dataverse > /mnt/dataverse.dump
+#gzip -f /mnt/dataverse.dump
+
+# dump databases
+docker exec --user postgres postgres pg_dump -U dataverse dataverse | gzip > /mnt/dataverse.dump.gz
+#docker exec --user postgres postgres pg_dump -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} | gzip > /mnt/dataverse.dump.gz
+
+#cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/dataverse.`date +%Y%m%d_%H%M%z`.dump.gz
rsync -arvP --rsh=ssh /mnt/dataverse.dump.gz DVmtr-cmp01:/tmp/dataverse.dump.gz
+docker exec dataverse ./opt/payara/init.d/cronjob/backupData.sh
+
+rm /mnt/dataverse.dump.gz
\ No newline at end of file
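
dumpdatabase.sh runs on the docker host (it calls docker exec itself). A hypothetical host crontab entry for a nightly run; the repository checkout path is an assumption:

    # Append a 02:00 nightly dump job to the current crontab (script path is assumed)
    ( crontab -l 2>/dev/null; echo "0 2 * * * /root/dataverse-docker/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh >> /var/log/dumpdatabase.log 2>&1" ) | crontab -
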
diff --git a/distros/dataverse.no/runOnce/update_5.14.sh b/distros/dataverse.no/runOnce/update_5.14.sh
new file mode 100644
index 0000000..22d867f
--- /dev/null
+++ b/distros/dataverse.no/runOnce/update_5.14.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:FilePIDsEnabled
+
+curl -s https://github.com/IQSS/dataverse/releases/download/v5.14/citation.tsv -o /tmp/citation.tsv
+curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @/tmp/citation.tsv -H "Content-type: text/tab-separated-values"
+
+rm /tmp/citation.tsv
\ No newline at end of file
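
The run-once script talks to the admin API on localhost:8080, so it has to execute where that port reaches Dataverse. One hedged way to do that, assuming the runOnce directory is available inside the container at the path shown (an assumption, not something this patch sets up):

    docker exec dataverse bash /opt/payara/runOnce/update_5.14.sh
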
From c31f3739ce152e8b82d02dd7df01ff6c017f01bc Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Fri, 15 Mar 2024 08:41:00 +0100
Subject: [PATCH 305/354] create log directory if missing and write output to a log file
---
.../init.d/cronjob/checkETagByFiles.sh | 16 ++++++++++++++--
1 file changed, 14 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index 586e231..c96a0d7 100644
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -3,6 +3,18 @@
export PGPASSWORD=`cat /secrets/db/password`
cp -r /secrets/aws-cli/.aws ~
+#
+LogDir="/opt/payara/appserver/glassfish/domains/domain1/logs/"
+if [ ! -d "${LogDir}" ]; then
+ LogDir="/var/log/"
+fi
+
+if [ ! -d "/tmp/" ]; then
+ mkdir -p "/tmp/"
+fi
+
+LogFile="${LogDir}checkETag_`date +%Y%m%d_%H%M%z`.log"
+
psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /tmp/dataverse_checkETag.txt
while read p; do
@@ -13,11 +25,11 @@ while read p; do
if [ -z "${s3ETag}" ]; then
- echo "is not exist in the s3 storage: ${arrayData[0]}"
+ echo "is not exist in the s3 storage: ${arrayData[0]}" > ${LogFile}
else
if [ "${s3ETag}" != "${arrayData[1]}" ]; then
- echo "is not equal: ${arrayData[0]}"
+ echo "is not equal: ${arrayData[0]}" > ${LogFile}
fi
fi
From 06b177360fd48b91c615e0b3139626617213e591 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Fri, 15 Mar 2024 08:55:05 +0100
Subject: [PATCH 306/354] add progress output to update 5.14 script
---
distros/dataverse.no/runOnce/update_5.14.sh | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/distros/dataverse.no/runOnce/update_5.14.sh b/distros/dataverse.no/runOnce/update_5.14.sh
index 22d867f..90f9df3 100644
--- a/distros/dataverse.no/runOnce/update_5.14.sh
+++ b/distros/dataverse.no/runOnce/update_5.14.sh
@@ -1,8 +1,12 @@
#!/bin/bash
+echo "Enable File PID:"
curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:FilePIDsEnabled
+echo "PID finish"
+echo "Downlaod and load citation.tsv file"
curl -s https://github.com/IQSS/dataverse/releases/download/v5.14/citation.tsv -o /tmp/citation.tsv
curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @/tmp/citation.tsv -H "Content-type: text/tab-separated-values"
+echo "citation.tsv finish"
rm /tmp/citation.tsv
\ No newline at end of file
From bc0e7dd4df8b83f97a5f90555a5f963e9ad0e018 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Fri, 15 Mar 2024 09:02:40 +0100
Subject: [PATCH 307/354] bug fix: dumpdatabase.sh
---
distros/dataverse.no/init.d/cronjob/dumpdatabase.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
index fdee092..5babb41 100755
--- a/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
+++ b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh
@@ -14,6 +14,6 @@ docker exec --user postgres postgres pg_dump -U dataverse dataverse | gzip > /mn
#cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/dataverse.`date +%Y%m%d_%H%M%z`.dump.gz
rsync -arvP --rsh=ssh /mnt/dataverse.dump.gz DVmtr-cmp01:/tmp/dataverse.dump.gz
-docker exec dataverse ./opt/payara/init.d/cronjob/backupData.sh
+docker exec dataverse /opt/payara/init.d/cronjob/backupData.sh
rm /mnt/dataverse.dump.gz
\ No newline at end of file
From 921d2eaeadd45bc569e6f5288ee573edf805c715 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Fri, 15 Mar 2024 09:08:07 +0100
Subject: [PATCH 308/354] backupData script: fix some bugs
---
distros/dataverse.no/init.d/cronjob/backupData.sh | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/backupData.sh b/distros/dataverse.no/init.d/cronjob/backupData.sh
index e150ae3..4e82e68 100755
--- a/distros/dataverse.no/init.d/cronjob/backupData.sh
+++ b/distros/dataverse.no/init.d/cronjob/backupData.sh
@@ -10,7 +10,11 @@ cp -r /secrets/aws-cli/.aws ~
#pg_dump -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} | gzip > /mnt/dataverse.dump.gz
dumpName="dataverse.`date +%Y%m%d_%H%M%z`.dump.gz"
-cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/${dumpName}
+
+if [ -d "/mntblob/data/databaseDumps/" ]; then
+ cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/${dumpName}
+fi
+
aws s3 --endpoint https://$aws_endpoint cp /mnt/dataverse.dump.gz s3://$aws_bucket_name/databaseDumps/${dumpName}
# backup files
@@ -29,7 +33,7 @@ for file in $files
done
#echo $files
-rm ~/.aws
+rm -rf ~/.aws
#cp -r /secrets/aws-cli/.aws ~
#aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/$file /data/$file
From 158fea5610330727d35c528dcfb59c4b73a57f59 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Fri, 15 Mar 2024 18:50:29 +0100
Subject: [PATCH 309/354] fix bug in log output
---
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index c96a0d7..552e4d5 100644
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -25,12 +25,14 @@ while read p; do
if [ -z "${s3ETag}" ]; then
- echo "is not exist in the s3 storage: ${arrayData[0]}" > ${LogFile}
+ echo "is not exist in the s3 storage: ${arrayData[0]}" >> ${LogFile}
else
if [ "${s3ETag}" != "${arrayData[1]}" ]; then
- echo "is not equal: ${arrayData[0]}" > ${LogFile}
+ echo "is not equal: ${arrayData[0]}" >> ${LogFile}
fi
fi
+ sleep 1s
+
done < /tmp/dataverse_checkETag.txt
From 1087d9c015a88aadc3e93c2dda3ec8143fa30864 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Fri, 15 Mar 2024 19:27:24 +0100
Subject: [PATCH 310/354] make ETag check reboot-consistent
---
.../init.d/cronjob/checkETagByFiles.sh | 26 +++++++++++++++----
1 file changed, 21 insertions(+), 5 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index 552e4d5..10e5ed0 100644
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -15,14 +15,20 @@ fi
LogFile="${LogDir}checkETag_`date +%Y%m%d_%H%M%z`.log"
-psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /tmp/dataverse_checkETag.txt
+if [ ! -f "/tmp/dataverse_checkETag.txt" ]; then
+ psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /tmp/dataverse_checkETag.txt
+fi
+
+#while read p; do
+while true; do
-while read p; do
- IFS=' ' read -a arrayData <<< "$p"
+ line=$(head -n 1 /tmp/dataverse_checkETag.txt)
+
+ IFS=' ' read -a arrayData <<< "$line"
#echo ${arrayData[0]}
s3ETag=$(aws s3api --endpoint https://$aws_endpoint head-object --bucket $aws_bucket_name --key ${arrayData[0]} 2> /dev/null | jq .ETag | sed 's/\"//g' | sed 's/\\//g')
-
+
if [ -z "${s3ETag}" ]; then
echo "is not exist in the s3 storage: ${arrayData[0]}" >> ${LogFile}
@@ -33,6 +39,16 @@ while read p; do
fi
fi
+ tail -n +1 "/tmp/dataverse_checkETag.txt" > "/tmp/dataverse_checkETag.txt.tmp"
+ mv "/tmp/dataverse_checkETag.txt.tmp" "/tmp/dataverse_checkETag.txt"
+
+ if [ ! -s "/tmp/dataverse_checkETag.txt" ]; then
+ exit 0
+ fi
+
sleep 1s
+done
+#done < /tmp/dataverse_checkETag.txt
+
-done < /tmp/dataverse_checkETag.txt
+rm /tmp/dataverse_checkETag.txt
\ No newline at end of file
From 43ab8171e38457997c9fff7399f2eceb74c886d6 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Fri, 15 Mar 2024 19:30:57 +0100
Subject: [PATCH 311/354] fix removal of first line
---
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index 10e5ed0..83ec959 100644
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -39,7 +39,7 @@ while true; do
fi
fi
- tail -n +1 "/tmp/dataverse_checkETag.txt" > "/tmp/dataverse_checkETag.txt.tmp"
+ tail -n +2 "/tmp/dataverse_checkETag.txt" > "/tmp/dataverse_checkETag.txt.tmp"
mv "/tmp/dataverse_checkETag.txt.tmp" "/tmp/dataverse_checkETag.txt"
if [ ! -s "/tmp/dataverse_checkETag.txt" ]; then
From cc2e6a002c540944e0c7c350d92d1b061dbf253b Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Fri, 15 Mar 2024 20:00:11 +0100
Subject: [PATCH 312/354] remove REPLACE from SQL command
---
distros/dataverse.no/init.d/cronjob/checkfiles.sql | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkfiles.sql b/distros/dataverse.no/init.d/cronjob/checkfiles.sql
index 74c3ca0..9f73dee 100644
--- a/distros/dataverse.no/init.d/cronjob/checkfiles.sql
+++ b/distros/dataverse.no/init.d/cronjob/checkfiles.sql
@@ -1,3 +1,3 @@
--select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '2 day');
-select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile';
\ No newline at end of file
+select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', storageidentifier ), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile';
\ No newline at end of file
From cc6ea169b73fd1247aa131353535a4e84bdcf080 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Mon, 18 Mar 2024 11:03:34 +0100
Subject: [PATCH 313/354] footer fix "Footer customization #20"
---
distros/dataverse.no/modification/custom-footer.html | 4 ++--
distros/dataverse.no/modification/dataverse_footer.xhtml | 8 ++++----
distros/dataverse.no/modification/dataverse_header.xhtml | 2 +-
3 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/distros/dataverse.no/modification/custom-footer.html b/distros/dataverse.no/modification/custom-footer.html
index f033137..e8800fc 100644
--- a/distros/dataverse.no/modification/custom-footer.html
+++ b/distros/dataverse.no/modification/custom-footer.html
@@ -34,11 +34,11 @@
+ Deposit Guide
-
+
+
+
From 798cf51127b4054ccd0dedead8b72a686c80ac08 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 21 Mar 2024 13:07:18 +0000
Subject: [PATCH 318/354] Shibboleth: change OS to Debian
---
.../dataverse.no/configs/http-dataverse.conf | 213 +++++
distros/dataverse.no/configs/http-ssl.conf | 893 +++++++++++-------
distros/dataverse.no/configs/httpd.conf | 551 -----------
3 files changed, 764 insertions(+), 893 deletions(-)
create mode 100644 distros/dataverse.no/configs/http-dataverse.conf
mode change 100755 => 100644 distros/dataverse.no/configs/http-ssl.conf
delete mode 100755 distros/dataverse.no/configs/httpd.conf
diff --git a/distros/dataverse.no/configs/http-dataverse.conf b/distros/dataverse.no/configs/http-dataverse.conf
new file mode 100644
index 0000000..dae9b15
--- /dev/null
+++ b/distros/dataverse.no/configs/http-dataverse.conf
@@ -0,0 +1,213 @@
+
+
+
+ ServerName test-docker.dataverse.no
+ DocumentRoot /var/www/html
+# ErrorLog /var/logs/http-error_log
+# CustomLog /var/logs/http-access_log combined env=!monitor
+
+#Header always set X-Frame-Options "SAMEORIGIN"
+#Header always set X-XSS-Protection "1; mode=block"
+#Header always set X-Content-Type-Options "nosniff"
+
+
+ Options None
+ Require all granted
+
+
+RewriteEngine On
+RewriteCond %{HTTPS} !=on
+RewriteRule ^/?(.*) https://%{SERVER_NAME}/$1 [R,L]
+
+
+
+
+ ServerName test-docker.dataverse.no
+
+ Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains"
+
+ Header always set X-Frame-Options "SAMEORIGIN"
+ Header always set X-XSS-Protection "1; mode=block"
+ Header always set X-Content-Type-Options "nosniff"
+ Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None"
+
+ErrorLog /dev/stdout
+TransferLog /dev/stdout
+
+# SSL Engine Switch:
+# Enable/Disable SSL for this virtual host.
+ SSLEngine on
+
+
+# SSL Protocol support:
+# List the enable protocol levels with which clients will be able to
+# connect. Disable SSLv2 access by default:
+SSLProtocol +TLSv1.3 +TLSv1.2
+SSLCipherSuite ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256
+SSLHonorCipherOrder on
+SSLCompression off
+SSLSessionTickets off
+
+
+# Server Certificate:
+# Point SSLCertificateFile at a PEM encoded certificate. If
+# the certificate is encrypted, then you will be prompted for a
+# pass phrase. Note that a kill -HUP will prompt again. A new
+# certificate can be generated using the genkey(1) command.
+# vty
+SSLCertificateFile /etc/ssl/certs/localhost.crt
+
+# Server Private Key:
+# If the key is not combined with the certificate, use this
+# directive to point at the key file. Keep in mind that if
+# you've both a RSA and a DSA private key you can configure
+# both in parallel (to also allow the use of DSA ciphers, etc.)
+# #vty
+SSLCertificateKeyFile /etc/ssl/private/localhost.key
+
+#
+# Turn this on to support "require valid-user" rules from other
+# mod_authn_* modules, and use "require shib-session" for anonymous
+# session-based authorization in mod_shib.
+#
+ShibCompatValidUser Off
+
+#
+# Ensures handler will be accessible.
+#
+
+ AuthType None
+ Require all granted
+ # vty
+ ShibRequestSetting requireSession 1
+ require shib-session
+
+
+
+ AuthType shibboleth
+ ShibRequestSetting requireSession 1
+ require valid-user
+
+
+
+#
+# Used for example style sheet in error templates.
+#
+
+
+ AuthType None
+ Require all granted
+
+ Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css
+
+
+#
+# Configure the module for content.
+#
+# You MUST enable AuthType shibboleth for the module to process
+# any requests, and there MUST be a require command as well. To
+# enable Shibboleth but not specify any session/access requirements
+# use "require shibboleth".
+#
+
+ AuthType shibboleth
+ ShibRequestSetting requireSession 1
+ require shib-session
+
+
+
+SSLProxyVerify none
+SSLProxyCheckPeerCN off
+SSLProxyCheckPeerName off
+SSLProxyCheckPeerExpire off
+
+ProxyPassInterpolateEnv On
+ProxyPassMatch ^/Shibboleth.sso !
+ProxyPassMatch ^/shibboleth-ds !
+ProxyPass "/" "ajp://dataverse:8009/" timeout=600
+ProxyPassReverse "/" "ajp://dataverse:8009/" timeout=600
+ProxyPassReverseCookieDomain "dataverse" "test-docker.dataverse.no"
+ProxyPassReverseCookiePath "/" "/"
+
+
+# SSL Engine Options:
+# Set various options for the SSL engine.
+# o FakeBasicAuth:
+# Translate the client X.509 into a Basic Authorisation. This means that
+# the standard Auth/DBMAuth methods can be used for access control. The
+# user name is the `one line' version of the client's X.509 certificate.
+# Note that no password is obtained from the user. Every entry in the user
+# file needs this password: `xxj31ZMTZzkVA'.
+# o ExportCertData:
+# This exports two additional environment variables: SSL_CLIENT_CERT and
+# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the
+# server (always existing) and the client (only existing when client
+# authentication is used). This can be used to import the certificates
+# into CGI scripts.
+# o StdEnvVars:
+# This exports the standard SSL/TLS related `SSL_*' environment variables.
+# Per default this exportation is switched off for performance reasons,
+# because the extraction step is an expensive operation and is usually
+# useless for serving static content. So one usually enables the
+# exportation for CGI and SSI requests only.
+# o StrictRequire:
+# This denies access when "SSLRequireSSL" or "SSLRequire" applied even
+# under a "Satisfy any" situation, i.e. when it applies access is denied
+# and no other module can change it.
+# o OptRenegotiate:
+# This enables optimized SSL connection renegotiation handling when SSL
+# directives are used in per-directory context.
+#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire
+
+ SSLOptions +StdEnvVars
+
+
+ SSLOptions +StdEnvVars
+
+
+
+# SSL Protocol Adjustments:
+# The safe and default but still SSL/TLS standard compliant shutdown
+# approach is that mod_ssl sends the close notify alert but doesn't wait for
+# the close notify alert from client. When you need a different shutdown
+# approach you can use one of the following variables:
+# o ssl-unclean-shutdown:
+# This forces an unclean shutdown when the connection is closed, i.e. no
+# SSL close notify alert is send or allowed to received. This violates
+# the SSL/TLS standard but is needed for some brain-dead browsers. Use
+# this when you receive I/O errors because of the standard approach where
+# mod_ssl sends the close notify alert.
+# o ssl-accurate-shutdown:
+# This forces an accurate shutdown when the connection is closed, i.e. a
+# SSL close notify alert is send and mod_ssl waits for the close notify
+# alert of the client. This is 100% SSL/TLS standard compliant, but in
+# practice often causes hanging connections with brain-dead browsers. Use
+# this only for browsers where you know that their SSL implementation
+# works correctly.
+# Notice: Most problems of broken clients are also related to the HTTP
+# keep-alive facility, so you usually additionally want to disable
+# keep-alive for those clients, too. Use variable "nokeepalive" for this.
+# Similarly, one has to force some clients to use HTTP/1.0 to workaround
+# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and
+# "force-response-1.0" for this.
+BrowserMatch "MSIE [2-5]" \
+ nokeepalive ssl-unclean-shutdown \
+ downgrade-1.0 force-response-1.0
+
+# Per-Server Logging:
+# The home of a custom SSL log file. Use this when you want a
+# compact non-error SSL logfile on a virtual host basis.
+#CustomLog /dev/stdout \
+# "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
+CustomLog /var/log/httpd/access.log combined
+ErrorLog /var/log/httpd/error.log
+
+ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i"
+
+
+
+
+ ProxyPass !
+
+Alias /robots.txt /var/www/robots.txt
+
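
The new http-dataverse.conf redirects plain HTTP to HTTPS, proxies everything except the Shibboleth handlers to AJP, and serves /robots.txt from an Alias. Two quick checks against the vhost, using the ServerName from the file (-k skips certificate validation for the self-signed localhost.crt):

    curl -sI http://test-docker.dataverse.no/ | grep -i '^Location'     # should point at the https:// URL
    curl -skI https://test-docker.dataverse.no/robots.txt | head -n 1   # served by the Alias, not the AJP proxy
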
diff --git a/distros/dataverse.no/configs/http-ssl.conf b/distros/dataverse.no/configs/http-ssl.conf
old mode 100755
new mode 100644
index 79a2eb8..d1e0272
--- a/distros/dataverse.no/configs/http-ssl.conf
+++ b/distros/dataverse.no/configs/http-ssl.conf
@@ -1,32 +1,566 @@
#
-# When we also provide SSL we have to listen to the
-# the HTTPS port in addition.
+# This is the main Apache HTTP server configuration file. It contains the
+# configuration directives that give the server its instructions.
+# See <URL:http://httpd.apache.org/docs/2.4/> for detailed information.
+# In particular, see
+# <URL:http://httpd.apache.org/docs/2.4/mod/directives.html>
+# for a discussion of each configuration directive.
#
-Listen 443 https
+# Do NOT simply read the instructions in here without understanding
+# what they do. They're here only as hints or reminders. If you are unsure
+# consult the online docs. You have been warned.
+#
+# Configuration and logfile names: If the filenames you specify for many
+# of the server's control files begin with "/" (or "drive:/" for Win32), the
+# server will use that explicit path. If the filenames do *not* begin
+# with "/", the value of ServerRoot is prepended -- so "logs/access_log"
+# with ServerRoot set to "/usr/local/apache2" will be interpreted by the
+# server as "/usr/local/apache2/logs/access_log", whereas "/logs/access_log"
+# will be interpreted as '/logs/access_log'.
+
+#
+# ServerRoot: The top of the directory tree under which the server's
+# configuration, error, and log files are kept.
+#
+# Do not add a slash at the end of the directory path. If you point
+# ServerRoot at a non-local disk, be sure to specify a local disk on the
+# Mutex directive, if file-based mutexes are used. If you wish to share the
+# same ServerRoot for multiple httpd daemons, you will need to change at
+# least PidFile.
+#
+ServerRoot "/usr/local/apache2"
+
+#
+# Mutex: Allows you to set the mutex mechanism and mutex file directory
+# for individual mutexes, or change the global defaults
+#
+# Uncomment and change the directory if mutexes are file-based and the default
+# mutex file directory is not on a local disk or is not appropriate for some
+# other reason.
+#
+# Mutex default:logs
+
+#
+# Listen: Allows you to bind Apache to specific IP addresses and/or
+# ports, instead of the default. See also the <VirtualHost>
+# directive.
+#
+# Change this to Listen on specific IP addresses as shown below to
+# prevent Apache from glomming onto all bound IP addresses.
+#
+#Listen 12.34.56.78:80
+Listen 80
+Listen 443
TimeOut 600
LimitRequestBody 0
-##
-## SSL Global Context
-##
-## All SSL configuration in this context applies both to
-## the main server and all SSL-enabled virtual hosts.
-##
+#
+# Dynamic Shared Object (DSO) Support
+#
+# To be able to use the functionality of a module which was built as a DSO you
+# have to place corresponding `LoadModule' lines at this location so the
+# directives contained in it are actually available _before_ they are used.
+# Statically compiled modules (those listed by `httpd -l') do not need
+# to be loaded here.
+#
+# Example:
+# LoadModule foo_module modules/mod_foo.so
+#
+LoadModule mpm_event_module modules/mod_mpm_event.so
+#LoadModule mpm_prefork_module modules/mod_mpm_prefork.so
+#LoadModule mpm_worker_module modules/mod_mpm_worker.so
+LoadModule authn_file_module modules/mod_authn_file.so
+#LoadModule authn_dbm_module modules/mod_authn_dbm.so
+#LoadModule authn_anon_module modules/mod_authn_anon.so
+#LoadModule authn_dbd_module modules/mod_authn_dbd.so
+#LoadModule authn_socache_module modules/mod_authn_socache.so
+LoadModule authn_core_module modules/mod_authn_core.so
+LoadModule authz_host_module modules/mod_authz_host.so
+LoadModule authz_groupfile_module modules/mod_authz_groupfile.so
+LoadModule authz_user_module modules/mod_authz_user.so
+#LoadModule authz_dbm_module modules/mod_authz_dbm.so
+#LoadModule authz_owner_module modules/mod_authz_owner.so
+#LoadModule authz_dbd_module modules/mod_authz_dbd.so
+LoadModule authz_core_module modules/mod_authz_core.so
+#LoadModule authnz_ldap_module modules/mod_authnz_ldap.so
+#LoadModule authnz_fcgi_module modules/mod_authnz_fcgi.so
+LoadModule access_compat_module modules/mod_access_compat.so
+LoadModule auth_basic_module modules/mod_auth_basic.so
+#LoadModule auth_form_module modules/mod_auth_form.so
+#LoadModule auth_digest_module modules/mod_auth_digest.so
+#LoadModule allowmethods_module modules/mod_allowmethods.so
+#LoadModule isapi_module modules/mod_isapi.so
+#LoadModule file_cache_module modules/mod_file_cache.so
+#LoadModule cache_module modules/mod_cache.so
+#LoadModule cache_disk_module modules/mod_cache_disk.so
+#LoadModule cache_socache_module modules/mod_cache_socache.so
+LoadModule socache_shmcb_module modules/mod_socache_shmcb.so
+#LoadModule socache_dbm_module modules/mod_socache_dbm.so
+#LoadModule socache_memcache_module modules/mod_socache_memcache.so
+#LoadModule socache_redis_module modules/mod_socache_redis.so
+#LoadModule watchdog_module modules/mod_watchdog.so
+#LoadModule macro_module modules/mod_macro.so
+#LoadModule dbd_module modules/mod_dbd.so
+#LoadModule bucketeer_module modules/mod_bucketeer.so
+#LoadModule dumpio_module modules/mod_dumpio.so
+#LoadModule echo_module modules/mod_echo.so
+#LoadModule example_hooks_module modules/mod_example_hooks.so
+#LoadModule case_filter_module modules/mod_case_filter.so
+#LoadModule case_filter_in_module modules/mod_case_filter_in.so
+#LoadModule example_ipc_module modules/mod_example_ipc.so
+#LoadModule buffer_module modules/mod_buffer.so
+#LoadModule data_module modules/mod_data.so
+#LoadModule ratelimit_module modules/mod_ratelimit.so
+LoadModule reqtimeout_module modules/mod_reqtimeout.so
+#LoadModule ext_filter_module modules/mod_ext_filter.so
+#LoadModule request_module modules/mod_request.so
+#LoadModule include_module modules/mod_include.so
+LoadModule filter_module modules/mod_filter.so
+#LoadModule reflector_module modules/mod_reflector.so
+#LoadModule substitute_module modules/mod_substitute.so
+#LoadModule sed_module modules/mod_sed.so
+#LoadModule charset_lite_module modules/mod_charset_lite.so
+#LoadModule deflate_module modules/mod_deflate.so
+#LoadModule xml2enc_module modules/mod_xml2enc.so
+#LoadModule proxy_html_module modules/mod_proxy_html.so
+#LoadModule brotli_module modules/mod_brotli.so
+LoadModule mime_module modules/mod_mime.so
+#LoadModule ldap_module modules/mod_ldap.so
+LoadModule log_config_module modules/mod_log_config.so
+#LoadModule log_debug_module modules/mod_log_debug.so
+#LoadModule log_forensic_module modules/mod_log_forensic.so
+#LoadModule logio_module modules/mod_logio.so
+#LoadModule lua_module modules/mod_lua.so
+LoadModule env_module modules/mod_env.so
+#LoadModule mime_magic_module modules/mod_mime_magic.so
+#LoadModule cern_meta_module modules/mod_cern_meta.so
+#LoadModule expires_module modules/mod_expires.so
+LoadModule headers_module modules/mod_headers.so
+#LoadModule ident_module modules/mod_ident.so
+#LoadModule usertrack_module modules/mod_usertrack.so
+#LoadModule unique_id_module modules/mod_unique_id.so
+LoadModule setenvif_module modules/mod_setenvif.so
+LoadModule version_module modules/mod_version.so
+#LoadModule remoteip_module modules/mod_remoteip.so
+LoadModule proxy_module modules/mod_proxy.so
+#LoadModule proxy_connect_module modules/mod_proxy_connect.so
+#LoadModule proxy_ftp_module modules/mod_proxy_ftp.so
+#LoadModule proxy_http_module modules/mod_proxy_http.so
+#LoadModule proxy_fcgi_module modules/mod_proxy_fcgi.so
+#LoadModule proxy_scgi_module modules/mod_proxy_scgi.so
+#LoadModule proxy_uwsgi_module modules/mod_proxy_uwsgi.so
+#LoadModule proxy_fdpass_module modules/mod_proxy_fdpass.so
+#LoadModule proxy_wstunnel_module modules/mod_proxy_wstunnel.so
+LoadModule proxy_ajp_module modules/mod_proxy_ajp.so
+#LoadModule proxy_balancer_module modules/mod_proxy_balancer.so
+#LoadModule proxy_express_module modules/mod_proxy_express.so
+#LoadModule proxy_hcheck_module modules/mod_proxy_hcheck.so
+#LoadModule session_module modules/mod_session.so
+#LoadModule session_cookie_module modules/mod_session_cookie.so
+#LoadModule session_crypto_module modules/mod_session_crypto.so
+#LoadModule session_dbd_module modules/mod_session_dbd.so
+#LoadModule slotmem_shm_module modules/mod_slotmem_shm.so
+#LoadModule slotmem_plain_module modules/mod_slotmem_plain.so
+LoadModule ssl_module modules/mod_ssl.so
+#LoadModule optional_hook_export_module modules/mod_optional_hook_export.so
+#LoadModule optional_hook_import_module modules/mod_optional_hook_import.so
+#LoadModule optional_fn_import_module modules/mod_optional_fn_import.so
+#LoadModule optional_fn_export_module modules/mod_optional_fn_export.so
+#LoadModule dialup_module modules/mod_dialup.so
+LoadModule http2_module modules/mod_http2.so
+#LoadModule proxy_http2_module modules/mod_proxy_http2.so
+#LoadModule md_module modules/mod_md.so
+#LoadModule lbmethod_byrequests_module modules/mod_lbmethod_byrequests.so
+#LoadModule lbmethod_bytraffic_module modules/mod_lbmethod_bytraffic.so
+#LoadModule lbmethod_bybusyness_module modules/mod_lbmethod_bybusyness.so
+#LoadModule lbmethod_heartbeat_module modules/mod_lbmethod_heartbeat.so
+LoadModule unixd_module modules/mod_unixd.so
+#LoadModule heartbeat_module modules/mod_heartbeat.so
+#LoadModule heartmonitor_module modules/mod_heartmonitor.so
+#LoadModule dav_module modules/mod_dav.so
+LoadModule status_module modules/mod_status.so
+LoadModule autoindex_module modules/mod_autoindex.so
+#LoadModule asis_module modules/mod_asis.so
+#LoadModule info_module modules/mod_info.so
+#LoadModule suexec_module modules/mod_suexec.so
+<IfModule !mpm_prefork_module>
+    #LoadModule cgid_module modules/mod_cgid.so
+</IfModule>
+<IfModule mpm_prefork_module>
+    #LoadModule cgi_module modules/mod_cgi.so
+</IfModule>
+#LoadModule dav_fs_module modules/mod_dav_fs.so
+#LoadModule dav_lock_module modules/mod_dav_lock.so
+#LoadModule vhost_alias_module modules/mod_vhost_alias.so
+#LoadModule negotiation_module modules/mod_negotiation.so
+LoadModule dir_module modules/mod_dir.so
+#LoadModule imagemap_module modules/mod_imagemap.so
+#LoadModule actions_module modules/mod_actions.so
+#LoadModule speling_module modules/mod_speling.so
+#LoadModule userdir_module modules/mod_userdir.so
+LoadModule alias_module modules/mod_alias.so
+LoadModule rewrite_module modules/mod_rewrite.so
+LoadModule mod_shib /usr/lib/apache2/modules/mod_shib.so
+
+<IfModule unixd_module>
+#
+# If you wish httpd to run as a different user or group, you must run
+# httpd as root initially and it will switch.
+#
+# User/Group: The name (or #number) of the user/group to run httpd as.
+# It is usually good practice to create a dedicated user and group for
+# running httpd, as with most system services.
+#
+User www-data
+Group www-data
+
+</IfModule>
+
+# 'Main' server configuration
+#
+# The directives in this section set up the values used by the 'main'
+# server, which responds to any requests that aren't handled by a
+# definition. These values also provide defaults for
+# any containers you may define later in the file.
+#
+# All of these directives may appear inside containers,
+# in which case these default settings will be overridden for the
+# virtual host being defined.
+#
+
+#
+# ServerAdmin: Your address, where problems with the server should be
+# e-mailed. This address appears on some server-generated pages, such
+# as error documents. e.g. admin@your-domain.com
+#
+ServerAdmin support@dataverse.no
+
+#
+# ServerName gives the name and port that the server uses to identify itself.
+# This can often be determined automatically, but we recommend you specify
+# it explicitly to prevent problems during startup.
+#
+# If your host doesn't have a registered DNS name, enter its IP address here.
+#
+ServerName test-docker.dataverse.no
+
+#
+# Deny access to the entirety of your server's filesystem. You must
+# explicitly permit access to web content directories in other
+# blocks below.
+#
+<Directory />
+    AllowOverride none
+    Require all denied
+</Directory>
+
+#
+# Note that from this point forward you must specifically allow
+# particular features to be enabled - so if something's not working as
+# you might expect, make sure that you have specifically enabled it
+# below.
+#
+
+#
+# DocumentRoot: The directory out of which you will serve your
+# documents. By default, all requests are taken from this directory, but
+# symbolic links and aliases may be used to point to other locations.
+#
+DocumentRoot "/usr/local/apache2/htdocs"
+<Directory "/usr/local/apache2/htdocs">
+ #
+ # Possible values for the Options directive are "None", "All",
+ # or any combination of:
+ # Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews
+ #
+ # Note that "MultiViews" must be named *explicitly* --- "Options All"
+ # doesn't give it to you.
+ #
+ # The Options directive is both complicated and important. Please see
+ # http://httpd.apache.org/docs/2.4/mod/core.html#options
+ # for more information.
+ #
+ Options Indexes FollowSymLinks
+
+ #
+ # AllowOverride controls what directives may be placed in .htaccess files.
+ # It can be "All", "None", or any combination of the keywords:
+ # AllowOverride FileInfo AuthConfig Limit
+ #
+ AllowOverride None
+
+ #
+ # Controls who can get stuff from this server.
+ #
+ Require all granted
+</Directory>
+
+#
+# DirectoryIndex: sets the file that Apache will serve if a directory
+# is requested.
+#
+<IfModule dir_module>
+    DirectoryIndex index.html
+</IfModule>
+
+#
+# The following lines prevent .htaccess and .htpasswd files from being
+# viewed by Web clients.
+#
+<Files ".ht*">
+    Require all denied
+</Files>
+
+#
+# ErrorLog: The location of the error log file.
+# If you do not specify an ErrorLog directive within a
+# container, error messages relating to that virtual host will be
+# logged here. If you *do* define an error logfile for a
+# container, that host's errors will be logged there and not here.
+#
+ErrorLog /proc/self/fd/2
+
+#
+# LogLevel: Control the number of messages logged to the error_log.
+# Possible values include: debug, info, notice, warn, error, crit,
+# alert, emerg.
+#
+LogLevel warn
+
+<IfModule log_config_module>
+ #
+ # The following directives define some format nicknames for use with
+ # a CustomLog directive (see below).
+ #
+ LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
+ LogFormat "%h %l %u %t \"%r\" %>s %b" common
+
+    <IfModule logio_module>
+ # You need to enable mod_logio.c to use %I and %O
+ LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio
+    </IfModule>
+
+ #
+ # The location and format of the access logfile (Common Logfile Format).
+ # If you do not define any access logfiles within a
+ # container, they will be logged here. Contrariwise, if you *do*
+ # define per- access logfiles, transactions will be
+ # logged therein and *not* in this file.
+ #
+ CustomLog /proc/self/fd/1 common
+
+ #
+ # If you prefer a logfile with access, agent, and referer information
+ # (Combined Logfile Format) you can use the following directive.
+ #
+ #CustomLog "logs/access_log" combined
+</IfModule>
+
+<IfModule alias_module>
+ #
+ # Redirect: Allows you to tell clients about documents that used to
+ # exist in your server's namespace, but do not anymore. The client
+ # will make a new request for the document at its new location.
+ # Example:
+ # Redirect permanent /foo http://www.example.com/bar
+
+ #
+ # Alias: Maps web paths into filesystem paths and is used to
+ # access content that does not live under the DocumentRoot.
+ # Example:
+ # Alias /webpath /full/filesystem/path
+ #
+ # If you include a trailing / on /webpath then the server will
+ # require it to be present in the URL. You will also likely
+ # need to provide a section to allow access to
+ # the filesystem path.
+
+ #
+ # ScriptAlias: This controls which directories contain server scripts.
+ # ScriptAliases are essentially the same as Aliases, except that
+ # documents in the target directory are treated as applications and
+ # run by the server when requested rather than as documents sent to the
+ # client. The same rules about trailing "/" apply to ScriptAlias
+ # directives as to Alias.
+ #
+ ScriptAlias /cgi-bin/ "/usr/local/apache2/cgi-bin/"
+
+</IfModule>
+
+<IfModule cgid_module>
+ #
+ # ScriptSock: On threaded servers, designate the path to the UNIX
+ # socket used to communicate with the CGI daemon of mod_cgid.
+ #
+ #Scriptsock cgisock
+</IfModule>
+
+#
+# "/usr/local/apache2/cgi-bin" should be changed to whatever your ScriptAliased
+# CGI directory exists, if you have that configured.
+#
+<Directory "/usr/local/apache2/cgi-bin">
+    AllowOverride None
+    Options None
+    Require all granted
+</Directory>
+
+<IfModule headers_module>
+    #
+    # Avoid passing HTTP_PROXY environment to CGI's on this or any proxied
+    # backend servers which have lingering "httpoxy" defects.
+    # 'Proxy' request header is undefined by the IETF, not listed by IANA
+    #
+    RequestHeader unset Proxy early
+</IfModule>
+
+
+<IfModule mime_module>
+ # TypesConfig points to the file containing the list of mappings from
+ # filename extension to MIME-type.
+ #
+ TypesConfig conf/mime.types
+
+ #
+ # AddType allows you to add to or override the MIME configuration
+ # file specified in TypesConfig for specific file types.
+ #
+ #AddType application/x-gzip .tgz
+ #
+ # AddEncoding allows you to have certain browsers uncompress
+ # information on the fly. Note: Not all browsers support this.
+ #
+ #AddEncoding x-compress .Z
+ #AddEncoding x-gzip .gz .tgz
+ #
+ # If the AddEncoding directives above are commented-out, then you
+ # probably should define those extensions to indicate media types:
+ #
+ AddType application/x-compress .Z
+ AddType application/x-gzip .gz .tgz
+
+ #
+ # AddHandler allows you to map certain file extensions to "handlers":
+ # actions unrelated to filetype. These can be either built into the server
+ # or added with the Action directive (see below)
+ #
+ # To use CGI scripts outside of ScriptAliased directories:
+ # (You will also need to add "ExecCGI" to the "Options" directive.)
+ #
+ #AddHandler cgi-script .cgi
+
+ # For type maps (negotiated resources):
+ #AddHandler type-map var
+
+ #
+ # Filters allow you to process content before it is sent to the client.
+ #
+ # To parse .shtml files for server-side includes (SSI):
+ # (You will also need to add "Includes" to the "Options" directive.)
+ #
+ #AddType text/html .shtml
+ #AddOutputFilter INCLUDES .shtml
+</IfModule>
+
+#
+# The mod_mime_magic module allows the server to use various hints from the
+# contents of the file itself to determine its type. The MIMEMagicFile
+# directive tells the module where the hint definitions are located.
+#
+#MIMEMagicFile conf/magic
+
+#
+# Customizable error responses come in three flavors:
+# 1) plain text 2) local redirects 3) external redirects
+#
+# Some examples:
+#ErrorDocument 500 "The server made a boo boo."
+#ErrorDocument 404 /missing.html
+#ErrorDocument 404 "/cgi-bin/missing_handler.pl"
+#ErrorDocument 402 http://www.example.com/subscription_info.html
+#
+
+#
+# MaxRanges: Maximum number of Ranges in a request before
+# returning the entire resource, or one of the special
+# values 'default', 'none' or 'unlimited'.
+# Default setting is to accept 200 Ranges.
+#MaxRanges unlimited
+
+#
+# EnableMMAP and EnableSendfile: On systems that support it,
+# memory-mapping or the sendfile syscall may be used to deliver
+# files. This usually improves server performance, but must
+# be turned off when serving from network-mounted
+# filesystems or if support for these functions is otherwise
+# broken on your system.
+# Defaults: EnableMMAP On, EnableSendfile Off
+#
+#EnableMMAP off
+#EnableSendfile on
+
+# Supplemental configuration
+#
+# The configuration files in the conf/extra/ directory can be
+# included to add extra features or to modify the default configuration of
+# the server, or you may simply copy their contents here and change as
+# necessary.
+
+# Server-pool management (MPM specific)
+#Include conf/extra/httpd-mpm.conf
+
+# Multi-language error messages
+#Include conf/extra/httpd-multilang-errordoc.conf
+
+# Fancy directory listings
+#Include conf/extra/httpd-autoindex.conf
+
+# Language settings
+#Include conf/extra/httpd-languages.conf
+
+# User home directories
+#Include conf/extra/httpd-userdir.conf
+
+# Real-time info on requests and configuration
+#Include conf/extra/httpd-info.conf
-# Pass Phrase Dialog:
-# Configure the pass phrase gathering process.
-# The filtering dialog program (`builtin' is an internal
-# terminal dialog) has to provide the pass phrase on stdout.
-SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog
+# Virtual hosts
+Include conf/extra/httpd-vhosts.conf
+
+# Local access to the Apache HTTP Server Manual
+#Include conf/extra/httpd-manual.conf
+
+# Distributed authoring and versioning (WebDAV)
+#Include conf/extra/httpd-dav.conf
+
+# Various default settings
+#Include conf/extra/httpd-default.conf
+
+# Configure mod_proxy_html to understand HTML4/XHTML1
+
+Include conf/extra/proxy-html.conf
+
+
+# Secure (SSL/TLS) connections
+#Include conf/extra/httpd-ssl.conf
+#
+# Note: The following must be present to support
+# starting without SSL on platforms with no /dev/random equivalent
+# but a statically compiled-in mod_ssl.
+#
+
+SSLRandomSeed startup builtin
+SSLRandomSeed connect builtin
+
# Inter-Process Session Cache:
-# Configure the SSL Session Cache: First the mechanism
+# Configure the SSL Session Cache: First the mechanism
# to use and second the expiring timeout (in seconds).
SSLSessionCache shmcb:/run/httpd/sslcache(512000)
SSLSessionCacheTimeout 300
# Pseudo Random Number Generator (PRNG):
-# Configure one or more sources to seed the PRNG of the
+# Configure one or more sources to seed the PRNG of the
# SSL library. The seed data should be of good random quality.
# WARNING! On some platforms /dev/random blocks if not enough entropy
# is available. This means you then cannot use the /dev/random device
@@ -46,333 +580,8 @@ SSLRandomSeed connect builtin
# accelerators. Use "openssl engine -v" to list supported
# engine names. NOTE: If you enable an accelerator and the
# server does not start, consult the error logs and ensure
-# your accelerator is functioning properly.
+# your accelerator is functioning properly.
#
SSLCryptoDevice builtin
#SSLCryptoDevice ubsec
-##
-## SSL Virtual Host Context
-##
-#
-
- ServerName test-docker-dataverse.azure.uit.no
-# ServerName default
- DocumentRoot /var/www/html
-# ErrorLog /var/logs/http-error_log
-# CustomLog /var/logs/http-access_log combined env=!monitor
-
- #Header always set X-Frame-Options "SAMEORIGIN"
- #Header always set X-XSS-Protection "1; mode=block"
- #Header always set X-Content-Type-Options "nosniff"
-
-
-
- Options None
- Require all granted
-
-
- RewriteEngine On
- RewriteCond %{HTTPS} !=on
- RewriteRule ^/?(.*) https://%{SERVER_NAME}/$1 [R,L]
-
-
-
-
-
-
-
-# General setup for the virtual host, inherited from global configuration
-#DocumentRoot "/var/www/html"
-#ServerName www.example.com:443
-ServerName test-docker-dataverse.azure.uit.no
-#ServerName default
-Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains"
-# Content-Security-Policy: some Java files load over http, so this header
-# cannot be used.
-#Header always set Content-Security-Policy "default-src https:"
-#Header always set X-Frame-Options "SAMEORIGIN"
-#Header always set X-XSS-Protection "1; mode=block"
-#Header always set X-Content-Type-Options "nosniff"
-#:443
-
-# Use separate log files for the SSL virtual host; note that LogLevel
-# is not inherited from httpd.conf.
-ErrorLog /dev/stdout
-TransferLog /dev/stdout
-#LoadModule dumpio_module modules/mod_dumpio.so
-#
-# DumpIOInput On
-# DumpIOOutput On
-#
-#LogLevel dumpio:trace7
-
-Header always set X-Frame-Options "SAMEORIGIN"
-Header always set X-XSS-Protection "1; mode=block"
-Header always set X-Content-Type-Options "nosniff"
-Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None"
-
-# SSL Engine Switch:
-# Enable/Disable SSL for this virtual host.
-SSLEngine on
-
-# SSL Protocol support:
-# List the enable protocol levels with which clients will be able to
-# connect. Disable SSLv2 access by default:
-SSLProtocol +TLSv1.3 +TLSv1.2
-SSLCipherSuite ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256
-SSLHonorCipherOrder on
-SSLCompression off
-SSLSessionTickets off
-
-# SSL Cipher Suite:
-# List the ciphers that the client is permitted to negotiate.
-# See the mod_ssl documentation for a complete list.
-#SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA
-
-# Speed-optimized SSL Cipher configuration:
-# If speed is your main concern (on busy HTTPS servers e.g.),
-# you might want to force clients to specific, performance
-# optimized ciphers. In this case, prepend those ciphers
-# to the SSLCipherSuite list, and enable SSLHonorCipherOrder.
-# Caveat: by giving precedence to RC4-SHA and AES128-SHA
-# (as in the example below), most connections will no longer
-# have perfect forward secrecy - if the server's key is
-# compromised, captures of past or future traffic must be
-# considered compromised, too.
-#SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5
-#SSLHonorCipherOrder on
-
-# Server Certificate:
-# Point SSLCertificateFile at a PEM encoded certificate. If
-# the certificate is encrypted, then you will be prompted for a
-# pass phrase. Note that a kill -HUP will prompt again. A new
-# certificate can be generated using the genkey(1) command.
-# vty
-SSLCertificateFile /etc/pki/tls/certs/localhost.crt
-
-# Server Private Key:
-# If the key is not combined with the certificate, use this
-# directive to point at the key file. Keep in mind that if
-# you've both a RSA and a DSA private key you can configure
-# both in parallel (to also allow the use of DSA ciphers, etc.)
-# #vty
-SSLCertificateKeyFile /etc/pki/tls/private/localhost.key
-
-# Server Certificate Chain:
-# Point SSLCertificateChainFile at a file containing the
-# concatenation of PEM encoded CA certificates which form the
-# certificate chain for the server certificate. Alternatively
-# the referenced file can be the same as SSLCertificateFile
-# when the CA certificates are directly appended to the server
-# certificate for convenience.
-#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt
-
-# Certificate Authority (CA):
-# Set the CA certificate verification path where to find CA
-# certificates for client authentication or alternatively one
-# huge file containing all of them (file must be PEM encoded)
-#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt
-
-# Client Authentication (Type):
-# Client certificate verification type and depth. Types are
-# none, optional, require and optional_no_ca. Depth is a
-# number which specifies how deeply to verify the certificate
-# issuer chain before deciding the certificate is not valid.
-#SSLVerifyClient require
-#SSLVerifyDepth 10
-
-LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so
-
-#
-# Turn this on to support "require valid-user" rules from other
-# mod_authn_* modules, and use "require shib-session" for anonymous
-# session-based authorization in mod_shib.
-#
-ShibCompatValidUser Off
-
-#
-# Ensures handler will be accessible.
-#
-
- AuthType None
- Require all granted
- # vty
- ShibRequestSetting requireSession 1
- require shib-session
-
-
-
- AuthType shibboleth
- ShibRequestSetting requireSession 1
- require valid-user
-
-
-
-
-#
-# Used for example style sheet in error templates.
-#
-
-
- AuthType None
- Require all granted
-
- Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css
-
-
-#
-# Configure the module for content.
-#
-# You MUST enable AuthType shibboleth for the module to process
-# any requests, and there MUST be a require command as well. To
-# enable Shibboleth but not specify any session/access requirements
-# use "require shibboleth".
-#
-
- AuthType shibboleth
- ShibRequestSetting requireSession 1
- require shib-session
-
-
-
- SSLProxyVerify none
- SSLProxyCheckPeerCN off
- SSLProxyCheckPeerName off
- SSLProxyCheckPeerExpire off
-
-ProxyPassInterpolateEnv On
-ProxyPassMatch ^/Shibboleth.sso !
-ProxyPassMatch ^/shibboleth-ds !
-ProxyPassMatch ^/phpPgAdmin !
-ProxyPassMatch ^/nav !
-ProxyPassMatch ^/minio !
-ProxyPass "/" "ajp://dataverse:8009/" timeout=600
-ProxyPassReverse "/" "ajp://dataverse:8009/" timeout=600
-#ProxyPass "/" "ajp://dataverse:8009/" interpolate
-#ProxyPassReverse "/" "ajp://dataverse:8009/" interpolate
-#ProxyPass / http://dataverse:8080/ interpolate
-#ProxyPassReverse / http://dataverse:8080/ interpolate
-ProxyPassReverseCookieDomain "dataverse" "test-docker-dataverse.azure.uit.no"
-ProxyPassReverseCookiePath "/" "/"
-
-
- #AuthType Basic
- #AuthBasicProvider ldap
- AuthName "AD brukernavn og passord"
- ProxyPass http://phppgadmin/
- #http://pgadmin_container
- ProxyPassReverse http://phppgadmin/
- #http://pgadmin_container
- #AuthLDAPURL "ldaps://ldap.uit.no/cn=people,dc=uit,dc=no?uid"
- #Require ldap-user lss734 ood000 pco000 kni006
-
-
-
- ProxyPass http://minio:9000/
- ProxyPassReverse http://minio:9000/
-
-
-
- ProxyPass http://nav/
- ProxyPassReverse http://nav/
-
-
-# Access Control:
-# With SSLRequire you can do per-directory access control based
-# on arbitrary complex boolean expressions containing server
-# variable checks and other lookup directives. The syntax is a
-# mixture between C and Perl. See the mod_ssl documentation
-# for more details.
-#
-#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \
-# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \
-# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \
-# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \
-# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \
-# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/
-#
-
-# SSL Engine Options:
-# Set various options for the SSL engine.
-# o FakeBasicAuth:
-# Translate the client X.509 into a Basic Authorisation. This means that
-# the standard Auth/DBMAuth methods can be used for access control. The
-# user name is the `one line' version of the client's X.509 certificate.
-# Note that no password is obtained from the user. Every entry in the user
-# file needs this password: `xxj31ZMTZzkVA'.
-# o ExportCertData:
-# This exports two additional environment variables: SSL_CLIENT_CERT and
-# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the
-# server (always existing) and the client (only existing when client
-# authentication is used). This can be used to import the certificates
-# into CGI scripts.
-# o StdEnvVars:
-# This exports the standard SSL/TLS related `SSL_*' environment variables.
-# Per default this exportation is switched off for performance reasons,
-# because the extraction step is an expensive operation and is usually
-# useless for serving static content. So one usually enables the
-# exportation for CGI and SSI requests only.
-# o StrictRequire:
-# This denies access when "SSLRequireSSL" or "SSLRequire" applied even
-# under a "Satisfy any" situation, i.e. when it applies access is denied
-# and no other module can change it.
-# o OptRenegotiate:
-# This enables optimized SSL connection renegotiation handling when SSL
-# directives are used in per-directory context.
-#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire
-
- SSLOptions +StdEnvVars
-
-
- SSLOptions +StdEnvVars
-
-
-# SSL Protocol Adjustments:
-# The safe and default but still SSL/TLS standard compliant shutdown
-# approach is that mod_ssl sends the close notify alert but doesn't wait for
-# the close notify alert from client. When you need a different shutdown
-# approach you can use one of the following variables:
-# o ssl-unclean-shutdown:
-# This forces an unclean shutdown when the connection is closed, i.e. no
-# SSL close notify alert is sent or allowed to be received. This violates
-# the SSL/TLS standard but is needed for some brain-dead browsers. Use
-# this when you receive I/O errors because of the standard approach where
-# mod_ssl sends the close notify alert.
-# o ssl-accurate-shutdown:
-# This forces an accurate shutdown when the connection is closed, i.e. a
-# SSL close notify alert is sent and mod_ssl waits for the close notify
-# alert of the client. This is 100% SSL/TLS standard compliant, but in
-# practice often causes hanging connections with brain-dead browsers. Use
-# this only for browsers where you know that their SSL implementation
-# works correctly.
-# Notice: Most problems of broken clients are also related to the HTTP
-# keep-alive facility, so you usually additionally want to disable
-# keep-alive for those clients, too. Use variable "nokeepalive" for this.
-# Similarly, one has to force some clients to use HTTP/1.0 to workaround
-# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and
-# "force-response-1.0" for this.
-BrowserMatch "MSIE [2-5]" \
- nokeepalive ssl-unclean-shutdown \
- downgrade-1.0 force-response-1.0
-
-# Per-Server Logging:
-# The home of a custom SSL log file. Use this when you want a
-# compact non-error SSL logfile on a virtual host basis.
-#CustomLog /dev/stdout \
-# "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
-CustomLog /var/log/httpd/access.log combined
-ErrorLog /var/log/httpd/error.log
-
-ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i"
-
-#RewriteEngine On
-#RewriteRule .* https://site.uit.no/dataverseno/nn/driftsmelding/ [R=302,L]
-
-
-
-
- ProxyPass !
-
-Alias /robots.txt /var/www/robots.txt
-
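After the proxy rules and Shibboleth exclusions are in place, a quick smoke test from the host is to request one proxied path and two excluded paths. This is a minimal sketch, assuming curl is available and the vhost answers as test-docker.dataverse.no (adjust to the ServerName actually configured); the Shibboleth Status handler is usually restricted to localhost, so a 403 from outside still shows the request was answered by Apache/mod_shib rather than proxied to the dataverse container.

# Dataverse itself should come back through the AJP proxy (expect 200 or 302)
curl -kI https://test-docker.dataverse.no/

# /Shibboleth.sso and /robots.txt are excluded from proxying and must be served locally
curl -kI https://test-docker.dataverse.no/Shibboleth.sso/Status
curl -k  https://test-docker.dataverse.no/robots.txt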
diff --git a/distros/dataverse.no/configs/httpd.conf b/distros/dataverse.no/configs/httpd.conf
deleted file mode 100755
index be8e2bf..0000000
--- a/distros/dataverse.no/configs/httpd.conf
+++ /dev/null
@@ -1,551 +0,0 @@
-#
-# This is the main Apache HTTP server configuration file. It contains the
-# configuration directives that give the server its instructions.
-# See for detailed information.
-# In particular, see
-#
-# for a discussion of each configuration directive.
-#
-# Do NOT simply read the instructions in here without understanding
-# what they do. They're here only as hints or reminders. If you are unsure
-# consult the online docs. You have been warned.
-#
-# Configuration and logfile names: If the filenames you specify for many
-# of the server's control files begin with "/" (or "drive:/" for Win32), the
-# server will use that explicit path. If the filenames do *not* begin
-# with "/", the value of ServerRoot is prepended -- so "logs/access_log"
-# with ServerRoot set to "/usr/local/apache2" will be interpreted by the
-# server as "/usr/local/apache2/logs/access_log", whereas "/logs/access_log"
-# will be interpreted as '/logs/access_log'.
-
-#
-# ServerRoot: The top of the directory tree under which the server's
-# configuration, error, and log files are kept.
-#
-# Do not add a slash at the end of the directory path. If you point
-# ServerRoot at a non-local disk, be sure to specify a local disk on the
-# Mutex directive, if file-based mutexes are used. If you wish to share the
-# same ServerRoot for multiple httpd daemons, you will need to change at
-# least PidFile.
-#
-ServerRoot "/usr/local/apache2"
-
-#
-# Mutex: Allows you to set the mutex mechanism and mutex file directory
-# for individual mutexes, or change the global defaults
-#
-# Uncomment and change the directory if mutexes are file-based and the default
-# mutex file directory is not on a local disk or is not appropriate for some
-# other reason.
-#
-# Mutex default:logs
-
-#
-# Listen: Allows you to bind Apache to specific IP addresses and/or
-# ports, instead of the default. See also the
-# directive.
-#
-# Change this to Listen on specific IP addresses as shown below to
-# prevent Apache from glomming onto all bound IP addresses.
-#
-#Listen 12.34.56.78:80
-Listen 80
-
-#
-# Dynamic Shared Object (DSO) Support
-#
-# To be able to use the functionality of a module which was built as a DSO you
-# have to place corresponding `LoadModule' lines at this location so the
-# directives contained in it are actually available _before_ they are used.
-# Statically compiled modules (those listed by `httpd -l') do not need
-# to be loaded here.
-#
-# Example:
-# LoadModule foo_module modules/mod_foo.so
-#
-LoadModule mpm_event_module modules/mod_mpm_event.so
-#LoadModule mpm_prefork_module modules/mod_mpm_prefork.so
-#LoadModule mpm_worker_module modules/mod_mpm_worker.so
-LoadModule authn_file_module modules/mod_authn_file.so
-#LoadModule authn_dbm_module modules/mod_authn_dbm.so
-#LoadModule authn_anon_module modules/mod_authn_anon.so
-#LoadModule authn_dbd_module modules/mod_authn_dbd.so
-#LoadModule authn_socache_module modules/mod_authn_socache.so
-LoadModule authn_core_module modules/mod_authn_core.so
-LoadModule authz_host_module modules/mod_authz_host.so
-LoadModule authz_groupfile_module modules/mod_authz_groupfile.so
-LoadModule authz_user_module modules/mod_authz_user.so
-#LoadModule authz_dbm_module modules/mod_authz_dbm.so
-#LoadModule authz_owner_module modules/mod_authz_owner.so
-#LoadModule authz_dbd_module modules/mod_authz_dbd.so
-LoadModule authz_core_module modules/mod_authz_core.so
-#LoadModule authnz_ldap_module modules/mod_authnz_ldap.so
-#LoadModule authnz_fcgi_module modules/mod_authnz_fcgi.so
-LoadModule access_compat_module modules/mod_access_compat.so
-LoadModule auth_basic_module modules/mod_auth_basic.so
-#LoadModule auth_form_module modules/mod_auth_form.so
-#LoadModule auth_digest_module modules/mod_auth_digest.so
-#LoadModule allowmethods_module modules/mod_allowmethods.so
-#LoadModule isapi_module modules/mod_isapi.so
-#LoadModule file_cache_module modules/mod_file_cache.so
-#LoadModule cache_module modules/mod_cache.so
-#LoadModule cache_disk_module modules/mod_cache_disk.so
-#LoadModule cache_socache_module modules/mod_cache_socache.so
-LoadModule socache_shmcb_module modules/mod_socache_shmcb.so
-#LoadModule socache_dbm_module modules/mod_socache_dbm.so
-#LoadModule socache_memcache_module modules/mod_socache_memcache.so
-#LoadModule socache_redis_module modules/mod_socache_redis.so
-#LoadModule watchdog_module modules/mod_watchdog.so
-#LoadModule macro_module modules/mod_macro.so
-#LoadModule dbd_module modules/mod_dbd.so
-#LoadModule bucketeer_module modules/mod_bucketeer.so
-#LoadModule dumpio_module modules/mod_dumpio.so
-#LoadModule echo_module modules/mod_echo.so
-#LoadModule example_hooks_module modules/mod_example_hooks.so
-#LoadModule case_filter_module modules/mod_case_filter.so
-#LoadModule case_filter_in_module modules/mod_case_filter_in.so
-#LoadModule example_ipc_module modules/mod_example_ipc.so
-#LoadModule buffer_module modules/mod_buffer.so
-#LoadModule data_module modules/mod_data.so
-#LoadModule ratelimit_module modules/mod_ratelimit.so
-LoadModule reqtimeout_module modules/mod_reqtimeout.so
-#LoadModule ext_filter_module modules/mod_ext_filter.so
-#LoadModule request_module modules/mod_request.so
-#LoadModule include_module modules/mod_include.so
-LoadModule filter_module modules/mod_filter.so
-#LoadModule reflector_module modules/mod_reflector.so
-#LoadModule substitute_module modules/mod_substitute.so
-#LoadModule sed_module modules/mod_sed.so
-#LoadModule charset_lite_module modules/mod_charset_lite.so
-#LoadModule deflate_module modules/mod_deflate.so
-#LoadModule xml2enc_module modules/mod_xml2enc.so
-#LoadModule proxy_html_module modules/mod_proxy_html.so
-#LoadModule brotli_module modules/mod_brotli.so
-LoadModule mime_module modules/mod_mime.so
-#LoadModule ldap_module modules/mod_ldap.so
-LoadModule log_config_module modules/mod_log_config.so
-#LoadModule log_debug_module modules/mod_log_debug.so
-#LoadModule log_forensic_module modules/mod_log_forensic.so
-#LoadModule logio_module modules/mod_logio.so
-#LoadModule lua_module modules/mod_lua.so
-LoadModule env_module modules/mod_env.so
-#LoadModule mime_magic_module modules/mod_mime_magic.so
-#LoadModule cern_meta_module modules/mod_cern_meta.so
-#LoadModule expires_module modules/mod_expires.so
-LoadModule headers_module modules/mod_headers.so
-#LoadModule ident_module modules/mod_ident.so
-#LoadModule usertrack_module modules/mod_usertrack.so
-#LoadModule unique_id_module modules/mod_unique_id.so
-LoadModule setenvif_module modules/mod_setenvif.so
-LoadModule version_module modules/mod_version.so
-#LoadModule remoteip_module modules/mod_remoteip.so
-#LoadModule proxy_module modules/mod_proxy.so
-#LoadModule proxy_connect_module modules/mod_proxy_connect.so
-#LoadModule proxy_ftp_module modules/mod_proxy_ftp.so
-#LoadModule proxy_http_module modules/mod_proxy_http.so
-#LoadModule proxy_fcgi_module modules/mod_proxy_fcgi.so
-#LoadModule proxy_scgi_module modules/mod_proxy_scgi.so
-#LoadModule proxy_uwsgi_module modules/mod_proxy_uwsgi.so
-#LoadModule proxy_fdpass_module modules/mod_proxy_fdpass.so
-#LoadModule proxy_wstunnel_module modules/mod_proxy_wstunnel.so
-#LoadModule proxy_ajp_module modules/mod_proxy_ajp.so
-#LoadModule proxy_balancer_module modules/mod_proxy_balancer.so
-#LoadModule proxy_express_module modules/mod_proxy_express.so
-#LoadModule proxy_hcheck_module modules/mod_proxy_hcheck.so
-#LoadModule session_module modules/mod_session.so
-#LoadModule session_cookie_module modules/mod_session_cookie.so
-#LoadModule session_crypto_module modules/mod_session_crypto.so
-#LoadModule session_dbd_module modules/mod_session_dbd.so
-#LoadModule slotmem_shm_module modules/mod_slotmem_shm.so
-#LoadModule slotmem_plain_module modules/mod_slotmem_plain.so
-LoadModule ssl_module modules/mod_ssl.so
-#LoadModule optional_hook_export_module modules/mod_optional_hook_export.so
-#LoadModule optional_hook_import_module modules/mod_optional_hook_import.so
-#LoadModule optional_fn_import_module modules/mod_optional_fn_import.so
-#LoadModule optional_fn_export_module modules/mod_optional_fn_export.so
-#LoadModule dialup_module modules/mod_dialup.so
-#LoadModule http2_module modules/mod_http2.so
-#LoadModule proxy_http2_module modules/mod_proxy_http2.so
-#LoadModule md_module modules/mod_md.so
-#LoadModule lbmethod_byrequests_module modules/mod_lbmethod_byrequests.so
-#LoadModule lbmethod_bytraffic_module modules/mod_lbmethod_bytraffic.so
-#LoadModule lbmethod_bybusyness_module modules/mod_lbmethod_bybusyness.so
-#LoadModule lbmethod_heartbeat_module modules/mod_lbmethod_heartbeat.so
-LoadModule unixd_module modules/mod_unixd.so
-#LoadModule heartbeat_module modules/mod_heartbeat.so
-#LoadModule heartmonitor_module modules/mod_heartmonitor.so
-#LoadModule dav_module modules/mod_dav.so
-LoadModule status_module modules/mod_status.so
-LoadModule autoindex_module modules/mod_autoindex.so
-#LoadModule asis_module modules/mod_asis.so
-#LoadModule info_module modules/mod_info.so
-#LoadModule suexec_module modules/mod_suexec.so
-
- #LoadModule cgid_module modules/mod_cgid.so
-
-
- #LoadModule cgi_module modules/mod_cgi.so
-
-#LoadModule dav_fs_module modules/mod_dav_fs.so
-#LoadModule dav_lock_module modules/mod_dav_lock.so
-#LoadModule vhost_alias_module modules/mod_vhost_alias.so
-#LoadModule negotiation_module modules/mod_negotiation.so
-LoadModule dir_module modules/mod_dir.so
-#LoadModule imagemap_module modules/mod_imagemap.so
-#LoadModule actions_module modules/mod_actions.so
-#LoadModule speling_module modules/mod_speling.so
-#LoadModule userdir_module modules/mod_userdir.so
-LoadModule alias_module modules/mod_alias.so
-#LoadModule rewrite_module modules/mod_rewrite.so
-
-
-#
-# If you wish httpd to run as a different user or group, you must run
-# httpd as root initially and it will switch.
-#
-# User/Group: The name (or #number) of the user/group to run httpd as.
-# It is usually good practice to create a dedicated user and group for
-# running httpd, as with most system services.
-#
-User www-data
-Group www-data
-
-
-
-# 'Main' server configuration
-#
-# The directives in this section set up the values used by the 'main'
-# server, which responds to any requests that aren't handled by a
-# definition. These values also provide defaults for
-# any containers you may define later in the file.
-#
-# All of these directives may appear inside containers,
-# in which case these default settings will be overridden for the
-# virtual host being defined.
-#
-
-#
-# ServerAdmin: Your address, where problems with the server should be
-# e-mailed. This address appears on some server-generated pages, such
-# as error documents. e.g. admin@your-domain.com
-#
-ServerAdmin you@example.com
-
-#
-# ServerName gives the name and port that the server uses to identify itself.
-# This can often be determined automatically, but we recommend you specify
-# it explicitly to prevent problems during startup.
-#
-# If your host doesn't have a registered DNS name, enter its IP address here.
-#
-#ServerName www.example.com:80
-
-#
-# Deny access to the entirety of your server's filesystem. You must
-# explicitly permit access to web content directories in other
-# blocks below.
-#
-
- AllowOverride none
- Require all denied
-
-
-#
-# Note that from this point forward you must specifically allow
-# particular features to be enabled - so if something's not working as
-# you might expect, make sure that you have specifically enabled it
-# below.
-#
-
-#
-# DocumentRoot: The directory out of which you will serve your
-# documents. By default, all requests are taken from this directory, but
-# symbolic links and aliases may be used to point to other locations.
-#
-DocumentRoot "/usr/local/apache2/htdocs-ssl"
-
- #
- # Possible values for the Options directive are "None", "All",
- # or any combination of:
- # Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews
- #
- # Note that "MultiViews" must be named *explicitly* --- "Options All"
- # doesn't give it to you.
- #
- # The Options directive is both complicated and important. Please see
- # http://httpd.apache.org/docs/2.4/mod/core.html#options
- # for more information.
- #
- Options Indexes FollowSymLinks
-
- #
- # AllowOverride controls what directives may be placed in .htaccess files.
- # It can be "All", "None", or any combination of the keywords:
- # AllowOverride FileInfo AuthConfig Limit
- #
- AllowOverride None
-
- #
- # Controls who can get stuff from this server.
- #
- Require all granted
-
-
-#
-# DirectoryIndex: sets the file that Apache will serve if a directory
-# is requested.
-#
-
- DirectoryIndex index.html
-
-
-#
-# The following lines prevent .htaccess and .htpasswd files from being
-# viewed by Web clients.
-#
-
- Require all denied
-
-
-#
-# ErrorLog: The location of the error log file.
-# If you do not specify an ErrorLog directive within a
-# container, error messages relating to that virtual host will be
-# logged here. If you *do* define an error logfile for a
-# container, that host's errors will be logged there and not here.
-#
-ErrorLog /proc/self/fd/2
-
-#
-# LogLevel: Control the number of messages logged to the error_log.
-# Possible values include: debug, info, notice, warn, error, crit,
-# alert, emerg.
-#
-LogLevel warn
-
-
- #
- # The following directives define some format nicknames for use with
- # a CustomLog directive (see below).
- #
- LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
- LogFormat "%h %l %u %t \"%r\" %>s %b" common
-
-
- # You need to enable mod_logio.c to use %I and %O
- LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio
-
-
- #
- # The location and format of the access logfile (Common Logfile Format).
- # If you do not define any access logfiles within a
- # container, they will be logged here. Contrariwise, if you *do*
- # define per- access logfiles, transactions will be
- # logged therein and *not* in this file.
- #
- CustomLog /proc/self/fd/1 common
-
- #
- # If you prefer a logfile with access, agent, and referer information
- # (Combined Logfile Format) you can use the following directive.
- #
- #CustomLog "logs/access_log" combined
-
-
-
- #
- # Redirect: Allows you to tell clients about documents that used to
- # exist in your server's namespace, but do not anymore. The client
- # will make a new request for the document at its new location.
- # Example:
- # Redirect permanent /foo http://www.example.com/bar
-
- #
- # Alias: Maps web paths into filesystem paths and is used to
- # access content that does not live under the DocumentRoot.
- # Example:
- # Alias /webpath /full/filesystem/path
- #
- # If you include a trailing / on /webpath then the server will
- # require it to be present in the URL. You will also likely
- # need to provide a section to allow access to
- # the filesystem path.
-
- #
- # ScriptAlias: This controls which directories contain server scripts.
- # ScriptAliases are essentially the same as Aliases, except that
- # documents in the target directory are treated as applications and
- # run by the server when requested rather than as documents sent to the
- # client. The same rules about trailing "/" apply to ScriptAlias
- # directives as to Alias.
- #
- ScriptAlias /cgi-bin/ "/usr/local/apache2/cgi-bin/"
-
-
-
-
- #
- # ScriptSock: On threaded servers, designate the path to the UNIX
- # socket used to communicate with the CGI daemon of mod_cgid.
- #
- #Scriptsock cgisock
-
-
-#
-# "/usr/local/apache2/cgi-bin" should be changed to whatever your ScriptAliased
-# CGI directory exists, if you have that configured.
-#
-
- AllowOverride None
- Options None
- Require all granted
-
-
-
- #
- # Avoid passing HTTP_PROXY environment to CGI's on this or any proxied
- # backend servers which have lingering "httpoxy" defects.
- # 'Proxy' request header is undefined by the IETF, not listed by IANA
- #
- RequestHeader unset Proxy early
-
-
-
- #
- # TypesConfig points to the file containing the list of mappings from
- # filename extension to MIME-type.
- #
- TypesConfig conf/mime.types
-
- #
- # AddType allows you to add to or override the MIME configuration
- # file specified in TypesConfig for specific file types.
- #
- #AddType application/x-gzip .tgz
- #
- # AddEncoding allows you to have certain browsers uncompress
- # information on the fly. Note: Not all browsers support this.
- #
- #AddEncoding x-compress .Z
- #AddEncoding x-gzip .gz .tgz
- #
- # If the AddEncoding directives above are commented-out, then you
- # probably should define those extensions to indicate media types:
- #
- AddType application/x-compress .Z
- AddType application/x-gzip .gz .tgz
-
- #
- # AddHandler allows you to map certain file extensions to "handlers":
- # actions unrelated to filetype. These can be either built into the server
- # or added with the Action directive (see below)
- #
- # To use CGI scripts outside of ScriptAliased directories:
- # (You will also need to add "ExecCGI" to the "Options" directive.)
- #
- #AddHandler cgi-script .cgi
-
- # For type maps (negotiated resources):
- #AddHandler type-map var
-
- #
- # Filters allow you to process content before it is sent to the client.
- #
- # To parse .shtml files for server-side includes (SSI):
- # (You will also need to add "Includes" to the "Options" directive.)
- #
- #AddType text/html .shtml
- #AddOutputFilter INCLUDES .shtml
-
-
-#
-# The mod_mime_magic module allows the server to use various hints from the
-# contents of the file itself to determine its type. The MIMEMagicFile
-# directive tells the module where the hint definitions are located.
-#
-#MIMEMagicFile conf/magic
-
-#
-# Customizable error responses come in three flavors:
-# 1) plain text 2) local redirects 3) external redirects
-#
-# Some examples:
-#ErrorDocument 500 "The server made a boo boo."
-#ErrorDocument 404 /missing.html
-#ErrorDocument 404 "/cgi-bin/missing_handler.pl"
-#ErrorDocument 402 http://www.example.com/subscription_info.html
-#
-
-#
-# MaxRanges: Maximum number of Ranges in a request before
-# returning the entire resource, or one of the special
-# values 'default', 'none' or 'unlimited'.
-# Default setting is to accept 200 Ranges.
-#MaxRanges unlimited
-
-#
-# EnableMMAP and EnableSendfile: On systems that support it,
-# memory-mapping or the sendfile syscall may be used to deliver
-# files. This usually improves server performance, but must
-# be turned off when serving from networked-mounted
-# filesystems or if support for these functions is otherwise
-# broken on your system.
-# Defaults: EnableMMAP On, EnableSendfile Off
-#
-#EnableMMAP off
-#EnableSendfile on
-
-# Supplemental configuration
-#
-# The configuration files in the conf/extra/ directory can be
-# included to add extra features or to modify the default configuration of
-# the server, or you may simply copy their contents here and change as
-# necessary.
-
-# Server-pool management (MPM specific)
-#Include conf/extra/httpd-mpm.conf
-
-# Multi-language error messages
-#Include conf/extra/httpd-multilang-errordoc.conf
-
-# Fancy directory listings
-#Include conf/extra/httpd-autoindex.conf
-
-# Language settings
-#Include conf/extra/httpd-languages.conf
-
-# User home directories
-#Include conf/extra/httpd-userdir.conf
-
-# Real-time info on requests and configuration
-#Include conf/extra/httpd-info.conf
-
-# Virtual hosts
-#Include conf/extra/httpd-vhosts.conf
-
-# Local access to the Apache HTTP Server Manual
-#Include conf/extra/httpd-manual.conf
-
-# Distributed authoring and versioning (WebDAV)
-#Include conf/extra/httpd-dav.conf
-
-# Various default settings
-#Include conf/extra/httpd-default.conf
-
-# Configure mod_proxy_html to understand HTML4/XHTML1
-
-Include conf/extra/proxy-html.conf
-
-
-# Secure (SSL/TLS) connections
-Include conf/extra/httpd-ssl.conf
-#
-# Note: The following must must be present to support
-# starting without SSL on platforms with no /dev/random equivalent
-# but a statically compiled-in mod_ssl.
-#
-
-SSLRandomSeed startup builtin
-SSLRandomSeed connect builtin
-
-
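Because this patch replaces the stock httpd.conf wholesale and moves the vhost into conf/extra/httpd-vhosts.conf, it is worth validating the configuration before restarting anything. A small sketch, assuming the web server runs in the container named shibboleth from docker-compose.yaml and that the httpd binary is on its PATH:

# check configuration syntax without restarting
docker exec shibboleth httpd -t

# list the virtual hosts Apache actually picked up from conf/extra/httpd-vhosts.conf
docker exec shibboleth httpd -S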
From e4c5d0e5c6a0fd1aed443895b9405e15c3a9973a Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 21 Mar 2024 15:07:00 +0000
Subject: [PATCH 319/354] change new shibboleth sp
---
distros/dataverse.no/docker-compose.yaml | 10 ++++++----
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index 1ffbae2..a005065 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -65,7 +65,7 @@ services:
networks:
- traefik
#image: test03/shibboleth:3.3.0.B
- image: ${DOCKER_HUB}/shibboleth:3.4.1-1
+ image: ${DOCKER_HUB}/shibboleth:3.4.1-2
container_name: shibboleth
hostname: shibboleth
privileged: true
@@ -76,10 +76,12 @@ services:
- ${LOGS_PATH}/shibboleth/httpd:/var/log/httpd
# - ${LOGS_PATH}/shibboleth/shibboleth:/var/log/shibboleth
- ${CONFIGURATION_PATH}/shibboleth:/etc/shibboleth
- - ./configs/http-ssl.conf:/etc/httpd/conf.d/ssl.conf
+ # - ./configs/http-ssl.conf:/etc/httpd/conf.d/ssl.conf
+ - ./configs/http-ssl.conf:/usr/local/apache2/conf/httpd.conf
+ - ./configs/http-dataverse.conf:/usr/local/apache2/conf/extra/httpd-vhosts.conf
- ./configs/robots.txt:/var/www/robots.txt
- - ${CONFIGURATION_PATH}/configuration/files/localhost.pem:/etc/pki/tls/certs/localhost.crt
- - ${CONFIGURATION_PATH}/configuration/files/localhost.key:/etc/pki/tls/private/localhost.key
+ - ${CONFIGURATION_PATH}/configuration/files/localhost.pem:/etc/ssl/certs/localhost.crt
+ - ${CONFIGURATION_PATH}/configuration/files/localhost.key:/etc/ssl/private/localhost.key
# hostname: ${hostname}
labels:
- "traefik.enable=true"
From 9c7b2e7e25a04bd01a1923c4348f573bb483277a Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Fri, 22 Mar 2024 10:29:39 +0000
Subject: [PATCH 320/354] fixed geobox
---
.../dataverse.no/migration/_dvno_geolocation_cleaning.sh | 2 +-
.../migration/dvno_geolocation_cleaning20240322.sql | 6 ++++++
2 files changed, 7 insertions(+), 1 deletion(-)
create mode 100644 distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh b/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
index d79132e..b71ccac 100644
--- a/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
@@ -1,3 +1,3 @@
#!/bin/bash
export PGPASSWORD=`cat /secrets/db/password`
-psql -U dataverse dataverse -h postgres -f ./_dvno_geolocation_cleaning20240301.sql
+psql -U dataverse dataverse -h postgres -f /tmp/dvno_geolocation_cleaning20240320.sql
diff --git a/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql b/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql
new file mode 100644
index 0000000..a0b6ac8
--- /dev/null
+++ b/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql
@@ -0,0 +1,6 @@
+update datasetfieldvalue set value='7.667742' where id=210693;
+update datasetfieldvalue set value='7.667742' where id=210679;
+update datasetfieldvalue set value='64.642997' where id=210678;
+update datasetfieldvalue set value='64.642997' where id=210692;
+update datasetfieldvalue set value='71.5' where id=206388;
+update datasetfieldvalue set value='69' where id=206399;
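The cleaning script _dvno_geolocation_cleaning.sh is what normally applies these statements; run by hand they amount to the following. A sketch, assuming the SQL file is reachable at /tmp inside a container that can reach the postgres host and that /secrets/db/password holds the dataverse role password:

# apply the geolocation fixes the same way the cleaning script does
export PGPASSWORD=`cat /secrets/db/password`
psql -U dataverse dataverse -h postgres -f /tmp/dvno_geolocation_cleaning20240322.sql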
From e4f29c7b05831461e1e121cafe4654d7fab16de3 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Mon, 25 Mar 2024 15:11:52 +0000
Subject: [PATCH 321/354] fixed geobox for KWP9WA and YUNHFP
---
.../dataverse.no/migration/_dvno_geolocation_cleaning.sh | 2 +-
.../migration/dvno_geolocation_cleaning20240322.sql | 6 ++++++
2 files changed, 7 insertions(+), 1 deletion(-)
create mode 100644 distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh b/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
index d79132e..b71ccac 100644
--- a/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
@@ -1,3 +1,3 @@
#!/bin/bash
export PGPASSWORD=`cat /secrets/db/password`
-psql -U dataverse dataverse -h postgres -f ./_dvno_geolocation_cleaning20240301.sql
+psql -U dataverse dataverse -h postgres -f /tmp/dvno_geolocation_cleaning20240320.sql
diff --git a/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql b/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql
new file mode 100644
index 0000000..a0b6ac8
--- /dev/null
+++ b/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql
@@ -0,0 +1,6 @@
+update datasetfieldvalue set value='7.667742' where id=210693;
+update datasetfieldvalue set value='7.667742' where id=210679;
+update datasetfieldvalue set value='64.642997' where id=210678;
+update datasetfieldvalue set value='64.642997' where id=210692;
+update datasetfieldvalue set value='71.5' where id=206388;
+update datasetfieldvalue set value='69' where id=206399;
From 1174f19b3670169175c54586895ac387b0e5c286 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 26 Mar 2024 10:32:01 +0000
Subject: [PATCH 322/354] Revert "Updated both Matomo code and opt-out code"
This reverts commit 3aa381ed12b79b4c71a93a8fe05243a26e476902.
---
.../dataverse.no/modification/analytics.xhtml | 285 +-----------------
1 file changed, 3 insertions(+), 282 deletions(-)
diff --git a/distros/dataverse.no/modification/analytics.xhtml b/distros/dataverse.no/modification/analytics.xhtml
index 533613a..a42753d 100644
--- a/distros/dataverse.no/modification/analytics.xhtml
+++ b/distros/dataverse.no/modification/analytics.xhtml
@@ -1,7 +1,8 @@
+
+
-
-
-
From f92d32ea66abd309957de4e95ce14bd4dec5d493 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Wed, 27 Mar 2024 08:57:04 +0100
Subject: [PATCH 323/354] remove geo stuff
---
.../dataverse.no/migration/_dvno_geolocation_cleaning.sh | 2 +-
.../migration/dvno_geolocation_cleaning20240322.sql | 6 ------
2 files changed, 1 insertion(+), 7 deletions(-)
delete mode 100644 distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql
diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh b/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
index b71ccac..d79132e 100644
--- a/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
+++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh
@@ -1,3 +1,3 @@
#!/bin/bash
export PGPASSWORD=`cat /secrets/db/password`
-psql -U dataverse dataverse -h postgres -f /tmp/dvno_geolocation_cleaning20240320.sql
+psql -U dataverse dataverse -h postgres -f ./_dvno_geolocation_cleaning20240301.sql
diff --git a/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql b/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql
deleted file mode 100644
index a0b6ac8..0000000
--- a/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql
+++ /dev/null
@@ -1,6 +0,0 @@
-update datasetfieldvalue set value='7.667742' where id=210693;
-update datasetfieldvalue set value='7.667742' where id=210679;
-update datasetfieldvalue set value='64.642997' where id=210678;
-update datasetfieldvalue set value='64.642997' where id=210692;
-update datasetfieldvalue set value='71.5' where id=206388;
-update datasetfieldvalue set value='69' where id=206399
From c0dc94357901af0078adbadebfb14f61e5014ac6 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Wed, 27 Mar 2024 09:09:51 +0100
Subject: [PATCH 324/354] matomo right code
---
.../dataverse.no/modification/analytics.xhtml | 324 ++++++++++++++++--
1 file changed, 302 insertions(+), 22 deletions(-)
diff --git a/distros/dataverse.no/modification/analytics.xhtml b/distros/dataverse.no/modification/analytics.xhtml
index a42753d..d0bcdfa 100644
--- a/distros/dataverse.no/modification/analytics.xhtml
+++ b/distros/dataverse.no/modification/analytics.xhtml
@@ -1,23 +1,303 @@
-
-
-
-
-
+ var _paq = window._paq || [];
+ /* tracker methods like "setCustomDimension" should be called before "trackPageView" */
+ _paq.push(['setDownloadClasses', "btn-download"]);
+ _paq.push(['trackPageView']);
+ _paq.push(['enableLinkTracking']);
+ (function() {
+ var u="//www.ub.uit.no/matomo/";
+ _paq.push(['setTrackerUrl', u+'matomo.php']);
+ _paq.push(['setSiteId', '6']);
+ var d=document, g=d.createElement('script'), s=d.getElementsByTagName('script')[0];
+ g.type='text/javascript'; g.async=true; g.defer=true; g.src=u+'matomo.js'; s.parentNode.insertBefore(g,s);
+ })();
+
+
+
+
+
+
+
+
\ No newline at end of file
From 00cdc4b659708bf9cd52269afeba5cbda7228c82 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 4 Apr 2024 10:51:53 +0200
Subject: [PATCH 325/354] ETag check: change text file storage location
---
.../init.d/cronjob/checkETagByFiles.sh | 19 ++++++++++---------
1 file changed, 10 insertions(+), 9 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index 83ec959..02f8d37 100644
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -9,20 +9,20 @@ if [ ! -d "${LogDir}" ]; then
LogDir="/var/log/"
fi
-if [ ! -d "/tmp/" ]; then
- mkdir -p "/tmp/"
+if [ ! -d "/mnt/" ]; then
+ mkdir -p "/mnt/"
fi
LogFile="${LogDir}checkETag_`date +%Y%m%d_%H%M%z`.log"
-if [ ! -f "/tmp/dataverse_checkETag.txt" ]; then
+if [ ! -f "/mnt/dataverse_checkETag.txt" ]; then
psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /tmp/dataverse_checkETag.txt
fi
#while read p; do
while true; do
- line=$(head -n 1 /tmp/dataverse_checkETag.txt)
+ line=$(head -n 1 /mnt/dataverse_checkETag.txt)
IFS=' ' read -a arrayData <<< "$line"
#echo ${arrayData[0]}
@@ -39,16 +39,17 @@ while true; do
fi
fi
- tail -n +2 "/tmp/dataverse_checkETag.txt" > "/tmp/dataverse_checkETag.txt.tmp"
- mv "/tmp/dataverse_checkETag.txt.tmp" "/tmp/dataverse_checkETag.txt"
+ #tail -n +2 "/mnt/dataverse_checkETag.txt" > "/mnt/dataverse_checkETag.txt.tmp"
+ #mv "/mnt/dataverse_checkETag.txt.tmp" "/mnt/dataverse_checkETag.txt"
- if [ ! -s "/tmp/dataverse_checkETag.txt" ]; then
+ sed '1d' "/mnt/dataverse_checkETag.txt"
+
+ if [ ! -s "/mnt/dataverse_checkETag.txt" ]; then
exit 0
fi
sleep 1s
done
-#done < /tmp/dataverse_checkETag.txt
-rm /tmp/dataverse_checkETag.txt
\ No newline at end of file
+rm /mnt/dataverse_checkETag.txt
\ No newline at end of file
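The bare `sed '1d' "/mnt/dataverse_checkETag.txt"` added above only writes the shortened list to stdout; the file on disk is unchanged, so the queue can never drain. Later commits in this series switch to an in-place pattern. A minimal sketch of the difference, using a hypothetical queue.txt:

    #!/bin/bash
    # Hypothetical queue file, for illustration only.
    printf 'first\nsecond\nthird\n' > queue.txt

    # sed without -i (and without a redirect) only prints;
    # queue.txt still holds all three lines afterwards.
    sed '1d' queue.txt

    # GNU sed can drop the first line in place.
    sed -i '1d' queue.txt
    cat queue.txt    # now prints "second" and "third"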
From bed1649ff827c1b1b87d509eb7b8db0027a7140a Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Thu, 4 Apr 2024 08:56:53 +0000
Subject: [PATCH 326/354] change mode of checkETag file
---
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh | 0
1 file changed, 0 insertions(+), 0 deletions(-)
mode change 100644 => 100755 distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
old mode 100644
new mode 100755
From 9548454ff055f6a0b32fba596522ed587abb4cb9 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 4 Apr 2024 11:04:33 +0200
Subject: [PATCH 327/354] check if file exists
---
.../init.d/cronjob/checkETagByFiles.sh | 38 ++++++++++---------
1 file changed, 20 insertions(+), 18 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index 02f8d37..de6dac9 100755
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -22,33 +22,35 @@ fi
#while read p; do
while true; do
- line=$(head -n 1 /mnt/dataverse_checkETag.txt)
+ if [ -f "/mnt/dataverse_checkETag.txt" ]; then
+ line=$(head -n 1 /mnt/dataverse_checkETag.txt)
- IFS=' ' read -a arrayData <<< "$line"
- #echo ${arrayData[0]}
+ IFS=' ' read -a arrayData <<< "$line"
+ #echo ${arrayData[0]}
- s3ETag=$(aws s3api --endpoint https://$aws_endpoint head-object --bucket $aws_bucket_name --key ${arrayData[0]} 2> /dev/null | jq .ETag | sed 's/\"//g' | sed 's/\\//g')
-
+ s3ETag=$(aws s3api --endpoint https://$aws_endpoint head-object --bucket $aws_bucket_name --key ${arrayData[0]} 2> /dev/null | jq .ETag | sed 's/\"//g' | sed 's/\\//g')
+
- if [ -z "${s3ETag}" ]; then
- echo "is not exist in the s3 storage: ${arrayData[0]}" >> ${LogFile}
- else
+ if [ -z "${s3ETag}" ]; then
+ echo "is not exist in the s3 storage: ${arrayData[0]}" >> ${LogFile}
+ else
- if [ "${s3ETag}" != "${arrayData[1]}" ]; then
- echo "is not equal: ${arrayData[0]}" >> ${LogFile}
+ if [ "${s3ETag}" != "${arrayData[1]}" ]; then
+ echo "is not equal: ${arrayData[0]}" >> ${LogFile}
+ fi
fi
- fi
- #tail -n +2 "/mnt/dataverse_checkETag.txt" > "/mnt/dataverse_checkETag.txt.tmp"
- #mv "/mnt/dataverse_checkETag.txt.tmp" "/mnt/dataverse_checkETag.txt"
+ #tail -n +2 "/mnt/dataverse_checkETag.txt" > "/mnt/dataverse_checkETag.txt.tmp"
+ #mv "/mnt/dataverse_checkETag.txt.tmp" "/mnt/dataverse_checkETag.txt"
- sed '1d' "/mnt/dataverse_checkETag.txt"
+ sed '1d' "/mnt/dataverse_checkETag.txt"
- if [ ! -s "/mnt/dataverse_checkETag.txt" ]; then
- exit 0
- fi
+ if [ ! -s "/mnt/dataverse_checkETag.txt" ]; then
+ exit 0
+ fi
- sleep 1s
+ sleep 1s
+ fi
done
From 6625b4ae3026338687b6f92f8f2654336eb64ff4 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 4 Apr 2024 11:12:00 +0200
Subject: [PATCH 328/354] Fix bug when writing file
---
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index de6dac9..ae60412 100755
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -14,9 +14,10 @@ if [ ! -d "/mnt/" ]; then
fi
LogFile="${LogDir}checkETag_`date +%Y%m%d_%H%M%z`.log"
-
if [ ! -f "/mnt/dataverse_checkETag.txt" ]; then
- psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /tmp/dataverse_checkETag.txt
+ echo "`date +%Y%m%d_%H%M%z`: Start psql" > ${LogFile}
+ psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /mnt/dataverse_checkETag.txt
+ echo "`date +%Y%m%d_%H%M%z`: END psql" >> ${LogFile}
fi
#while read p; do
From 5a70e4340be2cff20d1ab8330e1bcf48756074b3 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 4 Apr 2024 11:18:13 +0200
Subject: [PATCH 329/354] rewrite file
---
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index ae60412..abffbf8 100755
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -44,7 +44,7 @@ while true; do
#tail -n +2 "/mnt/dataverse_checkETag.txt" > "/mnt/dataverse_checkETag.txt.tmp"
#mv "/mnt/dataverse_checkETag.txt.tmp" "/mnt/dataverse_checkETag.txt"
- sed '1d' "/mnt/dataverse_checkETag.txt"
+ sed '1d' "/mnt/dataverse_checkETag.txt" > "/mnt/dataverse_checkETag.txt"
if [ ! -s "/mnt/dataverse_checkETag.txt" ]; then
exit 0
From 039822e35da4f8b92dced0cf2b959c1c05a2de2f Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 4 Apr 2024 11:21:45 +0200
Subject: [PATCH 330/354] remove first line via temp file (tail or sed)
---
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index abffbf8..7962f1f 100755
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -42,9 +42,8 @@ while true; do
fi
#tail -n +2 "/mnt/dataverse_checkETag.txt" > "/mnt/dataverse_checkETag.txt.tmp"
- #mv "/mnt/dataverse_checkETag.txt.tmp" "/mnt/dataverse_checkETag.txt"
-
- sed '1d' "/mnt/dataverse_checkETag.txt" > "/mnt/dataverse_checkETag.txt"
+ sed '1d' "/mnt/dataverse_checkETag.txt" > "/mnt/dataverse_checkETag.txt.tmp"
+ mv "/mnt/dataverse_checkETag.txt.tmp" "/mnt/dataverse_checkETag.txt"
if [ ! -s "/mnt/dataverse_checkETag.txt" ]; then
exit 0
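The previous revision's `sed '1d' file > file` fails because the shell truncates the redirection target before sed ever reads it, so the whole queue is lost. Writing to a scratch file and moving it back, as this commit does, avoids that. A short sketch with a hypothetical file name:

    #!/bin/bash
    printf 'first\nsecond\n' > queue.txt

    # Broken: the shell opens (and truncates) queue.txt before sed runs.
    sed '1d' queue.txt > queue.txt
    wc -c queue.txt                  # 0 bytes -- everything is gone

    # Fix used in this commit: scratch file, then replace the original.
    printf 'first\nsecond\n' > queue.txt
    sed '1d' queue.txt > queue.txt.tmp
    mv queue.txt.tmp queue.txt       # queue.txt now holds only "second"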
From 5478449a7431a040f577e39f988ff1aa9e5374be Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 4 Apr 2024 11:23:38 +0200
Subject: [PATCH 331/354] remove file on exit
---
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index 7962f1f..974cdfb 100755
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -46,6 +46,7 @@ while true; do
mv "/mnt/dataverse_checkETag.txt.tmp" "/mnt/dataverse_checkETag.txt"
if [ ! -s "/mnt/dataverse_checkETag.txt" ]; then
+ rm /mnt/dataverse_checkETag.txt
exit 0
fi
@@ -53,5 +54,4 @@ while true; do
fi
done
-
-rm /mnt/dataverse_checkETag.txt
\ No newline at end of file
+exit 0
\ No newline at end of file
From 6c42227b368f12f79100199fb1efbcef309728bd Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 4 Apr 2024 12:06:14 +0200
Subject: [PATCH 332/354] log output change
---
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index 974cdfb..f6381c6 100755
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -33,11 +33,11 @@ while true; do
if [ -z "${s3ETag}" ]; then
- echo "is not exist in the s3 storage: ${arrayData[0]}" >> ${LogFile}
+ echo "is not exist in the s3 storage: ${arrayData[0]} -- ${arrayData[1]}" >> ${LogFile}
else
if [ "${s3ETag}" != "${arrayData[1]}" ]; then
- echo "is not equal: ${arrayData[0]}" >> ${LogFile}
+ echo "is not equal: ${arrayData[0]} -- ${arrayData[1]}" >> ${LogFile}
fi
fi
From 0cabb11c43d24990ff9af9ff0cb93b50fc4db0b3 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 4 Apr 2024 15:27:43 +0200
Subject: [PATCH 333/354] rename file from checkfiles to checkETagByFiles
---
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh | 2 +-
.../init.d/cronjob/{checkfiles.sql => checkETagByFiles.sql} | 0
2 files changed, 1 insertion(+), 1 deletion(-)
rename distros/dataverse.no/init.d/cronjob/{checkfiles.sql => checkETagByFiles.sql} (100%)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index f6381c6..6201c46 100755
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -16,7 +16,7 @@ fi
LogFile="${LogDir}checkETag_`date +%Y%m%d_%H%M%z`.log"
if [ ! -f "/mnt/dataverse_checkETag.txt" ]; then
echo "`date +%Y%m%d_%H%M%z`: Start psql" > ${LogFile}
- psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /mnt/dataverse_checkETag.txt
+ psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkETagByFiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /mnt/dataverse_checkETag.txt
echo "`date +%Y%m%d_%H%M%z`: END psql" >> ${LogFile}
fi
diff --git a/distros/dataverse.no/init.d/cronjob/checkfiles.sql b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql
similarity index 100%
rename from distros/dataverse.no/init.d/cronjob/checkfiles.sql
rename to distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql
From 04b5ee3f395f9dfff78bc94ffbfd2342b9d7cf2d Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 4 Apr 2024 17:09:35 +0200
Subject: [PATCH 334/354] change names and add SQL check that publication date,
 protocol and identifier exist
---
.../init.d/cronjob/checkETagByFiles.sh | 18 +++++++++---------
.../init.d/cronjob/checkETagByFiles.sql | 2 +-
2 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
index 6201c46..8a4179c 100755
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -14,17 +14,17 @@ if [ ! -d "/mnt/" ]; then
fi
LogFile="${LogDir}checkETag_`date +%Y%m%d_%H%M%z`.log"
-if [ ! -f "/mnt/dataverse_checkETag.txt" ]; then
+if [ ! -f "/mnt/checkETagByFiles.txt" ]; then
echo "`date +%Y%m%d_%H%M%z`: Start psql" > ${LogFile}
- psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkETagByFiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /mnt/dataverse_checkETag.txt
+ psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkETagByFiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /mnt/checkETagByFiles.txt
echo "`date +%Y%m%d_%H%M%z`: END psql" >> ${LogFile}
fi
#while read p; do
while true; do
- if [ -f "/mnt/dataverse_checkETag.txt" ]; then
- line=$(head -n 1 /mnt/dataverse_checkETag.txt)
+ if [ -f "/mnt/checkETagByFiles.txt" ]; then
+ line=$(head -n 1 /mnt/checkETagByFiles.txt)
IFS=' ' read -a arrayData <<< "$line"
#echo ${arrayData[0]}
@@ -41,12 +41,12 @@ while true; do
fi
fi
- #tail -n +2 "/mnt/dataverse_checkETag.txt" > "/mnt/dataverse_checkETag.txt.tmp"
- sed '1d' "/mnt/dataverse_checkETag.txt" > "/mnt/dataverse_checkETag.txt.tmp"
- mv "/mnt/dataverse_checkETag.txt.tmp" "/mnt/dataverse_checkETag.txt"
+ #tail -n +2 "/mnt/checkETagByFiles.txt" > "/mnt/checkETagByFiles.txt.tmp"
+ sed '1d' "/mnt/checkETagByFiles.txt" > "/mnt/checkETagByFiles.txt.tmp"
+ mv "/mnt/checkETagByFiles.txt.tmp" "/mnt/checkETagByFiles.txt"
- if [ ! -s "/mnt/dataverse_checkETag.txt" ]; then
- rm /mnt/dataverse_checkETag.txt
+ if [ ! -s "/mnt/checkETagByFiles.txt" ]; then
+ rm /mnt/checkETagByFiles.txt
exit 0
fi
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql
index 9f73dee..749db56 100644
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql
@@ -1,3 +1,3 @@
--select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '2 day');
-select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', storageidentifier ), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile';
\ No newline at end of file
+select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', storageidentifier ), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile' and publicationdate notnull and protocol notnull and identifier notnull;
\ No newline at end of file
From 4b1345b7a9caec28478fc530bfca0574c7c502a6 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Thu, 4 Apr 2024 17:31:29 +0200
Subject: [PATCH 335/354] change SQL query
---
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql
index 749db56..e3a1ce3 100644
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql
@@ -1,3 +1,3 @@
--select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '2 day');
-select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', storageidentifier ), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile' and publicationdate notnull and protocol notnull and identifier notnull;
\ No newline at end of file
+select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', storageidentifier ), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile' and publicationdate IS NOT NULL and protocol IS NOT NULL and identifier IS NOT NULL;
\ No newline at end of file
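`notnull` is PostgreSQL's nonstandard shorthand; spelling the predicate as `IS NOT NULL` keeps the query in standard SQL without changing which rows are returned. The filter restricts the ETag/checksum comparison to DataFiles that are published and have a registered persistent identifier. A sketch for previewing how many files the cron job will process (the environment variables and secrets path are assumed from the surrounding scripts; the count query itself is illustrative):

    #!/bin/bash
    export PGPASSWORD=$(cat /secrets/db/password)
    psql -h "${DATAVERSE_DB_HOST}" -U "${DATAVERSE_DB_USER}" "${POSTGRES_DATABASE}" -c "
      SELECT count(*)
        FROM dvobject
        INNER JOIN datafile ON dvobject.id = datafile.id
        WHERE storageidentifier LIKE '%S3:%'
          AND dtype = 'DataFile'
          AND publicationdate IS NOT NULL
          AND protocol IS NOT NULL
          AND identifier IS NOT NULL;"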
From 6c05b7e4e94c1753c99ef885f99106049586b1f6 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Mon, 8 Apr 2024 08:35:31 +0200
Subject: [PATCH 336/354] bug fix: do not delete aws config during data backup
---
distros/dataverse.no/init.d/cronjob/backupData.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/distros/dataverse.no/init.d/cronjob/backupData.sh b/distros/dataverse.no/init.d/cronjob/backupData.sh
index 4e82e68..6004cd7 100755
--- a/distros/dataverse.no/init.d/cronjob/backupData.sh
+++ b/distros/dataverse.no/init.d/cronjob/backupData.sh
@@ -33,7 +33,7 @@ for file in $files
done
#echo $files
-rm -rf ~/.aws
+#rm -rf ~/.aws
#cp -r /secrets/aws-cli/.aws ~
#aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/$file /data/$file
From acf74cdbcc93762350e2b0875ad854508cc9c6d3 Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 16 Apr 2024 15:15:10 +0000
Subject: [PATCH 337/354] fixed database copy
---
distros/dataverse.no/init.d/cronjob/backupData.sh | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/cronjob/backupData.sh b/distros/dataverse.no/init.d/cronjob/backupData.sh
index 6004cd7..6dacf6f 100755
--- a/distros/dataverse.no/init.d/cronjob/backupData.sh
+++ b/distros/dataverse.no/init.d/cronjob/backupData.sh
@@ -11,8 +11,9 @@ cp -r /secrets/aws-cli/.aws ~
dumpName="dataverse.`date +%Y%m%d_%H%M%z`.dump.gz"
-if [ -d "/mntblob/data/databaseDumps/" ]; then
- cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/${dumpName}
+if [ -d "/data/databaseDumps/" ]; then
+ cp /mnt/dataverse.dump.gz /data/databaseDumps/${dumpName}
+ echo "copied" ${dumpName}
fi
aws s3 --endpoint https://$aws_endpoint cp /mnt/dataverse.dump.gz s3://$aws_bucket_name/databaseDumps/${dumpName}
From 0ec4a02d9685aec2e3a3e85b906ed58ef3c741cd Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Mon, 22 Apr 2024 13:09:42 +0000
Subject: [PATCH 338/354] check MD5 from blob
---
.env_sample | 3 ++
distros/dataverse.no/docker-compose.yaml | 2 ++
.../init.d/cronjob/checkETagByFiles.sh | 30 ++++++++++++++-----
3 files changed, 27 insertions(+), 8 deletions(-)
mode change 100755 => 100644 distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
diff --git a/.env_sample b/.env_sample
index 1074965..592b593 100644
--- a/.env_sample
+++ b/.env_sample
@@ -12,6 +12,8 @@ POSTGRESTMP=/mnt/tmp/postgres
#dataverse
WEBANALYTICSON=true
TESTBANNER=true
+BASEURL="https://....blob.core.windows.net/data1"
+KEYWINDOWSBLOB=key
# Dataverse database settings
DATAVERSE_DB_HOST=postgres
@@ -60,6 +62,7 @@ doi_password=doipassword
dataciterestapiurlstring=https\:\/\/api.test.datacite.org
baseurlstring=https\:\/\/mds.test.datacite.org
+
# AWS settings
# https://guides.dataverse.org/en/latest/installation/config.html#id90
aws_config=/secrets/aws-cli/.aws/cloudian
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
index a005065..e6c45e1 100755
--- a/distros/dataverse.no/docker-compose.yaml
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -193,6 +193,8 @@ services:
- "DATAVERSE_DB_NAME"
- "DATAVERSE_SERVICE_HOST"
- "DATAVERSE_URL"
+ - "BASEURL"
+ - "KEYWINDOWSBLOB"
- "SOLR_SERVICE_HOST"
- "SOLR_SERVICE_PORT"
- "CVM_SERVER_URL=https://ns.${traefikhost}"
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
old mode 100755
new mode 100644
index 8a4179c..faf5df1
--- a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
+++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
@@ -1,5 +1,11 @@
#!/bin/bash
+#BASEURL="https://....blob.core.windows.net/data1"
+#KEYWINDOWSBLOB=""
+
+BASEURL="$1"
+KEYWINDOWSBLOB="$2"
+
export PGPASSWORD=`cat /secrets/db/password`
cp -r /secrets/aws-cli/.aws ~
@@ -27,17 +33,25 @@ while true; do
line=$(head -n 1 /mnt/checkETagByFiles.txt)
IFS=' ' read -a arrayData <<< "$line"
- #echo ${arrayData[0]}
- s3ETag=$(aws s3api --endpoint https://$aws_endpoint head-object --bucket $aws_bucket_name --key ${arrayData[0]} 2> /dev/null | jq .ETag | sed 's/\"//g' | sed 's/\\//g')
+ #s3ETag=$(aws s3api --endpoint https://$aws_endpoint head-object --bucket $aws_bucket_name --key ${arrayData[0]} 2> /dev/null | jq .ETag | sed 's/\"//g' | sed 's/\\//g')
+ #curl -s "https://....blob.core.windows.net/data1?sp=r&st=2024-04-15T10:25:37Z&se=2024-04-15T18:25:37Z&spr=https&sv=2022-11-02&sr=c&sig=" -I -q | grep "Content-MD5:" | awk '{ print $2 }' | base64 -di | xxd -p
+ arrayData[0]=$(echo ${arrayData[0]} | sed -e 's/S3\:\/\/2002-yellow-dataverseno\://g')
+ md5BlobBase64=$(curl -s "${BASEURL}${arrayData[0]}${KEYWINDOWSBLOB}" -I -q | grep "Content-MD5: " | awk '{ print $2 }' | base64 -di)
+
+ if [ $? -eq 0 ]; then
+ md5Blob=$(echo "$md5BlobBase64" | xxd -p)
- if [ -z "${s3ETag}" ]; then
- echo "is not exist in the s3 storage: ${arrayData[0]} -- ${arrayData[1]}" >> ${LogFile}
- else
+ #if [ -z "${s3ETag}" ]; then
+ if [ -z "${md5BlobBase64}" ]; then
+ echo "is not exist in the s3 storage: ${arrayData[0]} -- ${arrayData[1]}" >> ${LogFile}
+ else
- if [ "${s3ETag}" != "${arrayData[1]}" ]; then
- echo "is not equal: ${arrayData[0]} -- ${arrayData[1]}" >> ${LogFile}
+ #if [ "${s3ETag}" != "${arrayData[1]}" ]; then
+ if [ "${md5Blob}" != "${arrayData[1]}" ]; then
+ echo "is not equal: ${arrayData[0]} -- ${arrayData[1]}" >> ${LogFile}
+ fi
fi
fi
@@ -54,4 +68,4 @@ while true; do
fi
done
-exit 0
\ No newline at end of file
+exit 0
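Azure Blob Storage returns an object's MD5 digest in the `Content-MD5` response header, base64-encoded, while Dataverse stores checksums as hex in `datafile.checksumvalue`; that is why the script decodes the header with `base64 -di` and re-encodes it with `xxd -p` before comparing. A standalone sketch of the same conversion (the storage account, SAS token and object key below are placeholders, not values from this repository):

    #!/bin/bash
    BASEURL="https://EXAMPLEACCOUNT.blob.core.windows.net/data1/"
    SASTOKEN="?sv=2022-11-02&sig=EXAMPLE"
    OBJECT="10.21337/EXAMPLE/18e0bdc5c2c-0123456789ab"

    # Fetch only the response headers and extract the base64-encoded MD5.
    md5_b64=$(curl -sI "${BASEURL}${OBJECT}${SASTOKEN}" \
                | grep -i '^Content-MD5:' | awk '{ print $2 }' | tr -d '\r')

    # base64 -> raw bytes -> lowercase hex, comparable to datafile.checksumvalue.
    md5_hex=$(printf '%s' "${md5_b64}" | base64 -d | xxd -p)
    echo "${md5_hex}"

Note that the header is only present when an MD5 was recorded for the blob at upload time, so an empty result does not necessarily mean the object is missing.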
From ad3d5eecd1d503f502e5b8f8c796c5657a35307a Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 23 Apr 2024 08:24:35 +0000
Subject: [PATCH 339/354] fixed checkETagByFiles.sh and curl call
---
distros/dataverse.no/init.d/201-bundle.sh | 2 +-
distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh | 0
2 files changed, 1 insertion(+), 1 deletion(-)
mode change 100644 => 100755 distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
diff --git a/distros/dataverse.no/init.d/201-bundle.sh b/distros/dataverse.no/init.d/201-bundle.sh
index 7f0c01c..004e259 100755
--- a/distros/dataverse.no/init.d/201-bundle.sh
+++ b/distros/dataverse.no/init.d/201-bundle.sh
@@ -1,5 +1,5 @@
#!/bin/bash
/usr/bin/apt-get install patch -y
-/usr/bin/curl -z -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch
+curl -z -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch
/usr/bin/patch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/propertyFiles/Bundle.properties $DOCROOT_DIR/Bundle.properties.patch
diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh
old mode 100644
new mode 100755
From 11fb760cc1b369b36d026bd3f9ca1802211f5f1d Mon Sep 17 00:00:00 2001
From: Louis-wr <85620187+Louis-wr@users.noreply.github.com>
Date: Tue, 23 Apr 2024 11:01:56 +0000
Subject: [PATCH 340/354] fixed: use DataverseNO-hosted dvwebloader instead of gdcc tool
---
distros/dataverse.no/init.d/055-dvwebloader.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/distros/dataverse.no/init.d/055-dvwebloader.sh b/distros/dataverse.no/init.d/055-dvwebloader.sh
index a7da41a..a7ef293 100755
--- a/distros/dataverse.no/init.d/055-dvwebloader.sh
+++ b/distros/dataverse.no/init.d/055-dvwebloader.sh
@@ -1,4 +1,4 @@
#!/bin/bash
curl -X PUT -d 'native/http,dvwebloader' http://localhost:8080/api/admin/settings/:UploadMethods
-curl -X PUT -d 'https://gdcc.github.io/dvwebloader/src/dvwebloader.html' http://localhost:8080/api/admin/settings/:WebloaderUrl
-
+#curl -X PUT -d 'https://gdcc.github.io/dvwebloader/src/dvwebloader.html' http://localhost:8080/api/admin/settings/:WebloaderUrl
+curl -X PUT -d 'https://dataverseno.github.io/dvwebloader/src/dvwebloader.html' http://localhost:8080/api/admin/settings/:WebloaderUrl
From a3802cc2997df8196514960a32cb107895621e68 Mon Sep 17 00:00:00 2001
From: Benedikt Meier
Date: Wed, 24 Apr 2024 10:06:34 +0200
Subject: [PATCH 341/354] analytics script: fix Issue
 DataverseNO/docker.dataverse.no#70
---
.../dataverse.no/modification/analytics.xhtml | 313 +------------
.../modification/dataverse_footer.xhtml | 424 +++++++++++++++---
2 files changed, 376 insertions(+), 361 deletions(-)
diff --git a/distros/dataverse.no/modification/analytics.xhtml b/distros/dataverse.no/modification/analytics.xhtml
index d0bcdfa..a9d644b 100644
--- a/distros/dataverse.no/modification/analytics.xhtml
+++ b/distros/dataverse.no/modification/analytics.xhtml
@@ -1,303 +1,24 @@
+
-
-
-
-
-
-
-
\ No newline at end of file
+
+
+
+
+
+
diff --git a/distros/dataverse.no/modification/dataverse_footer.xhtml b/distros/dataverse.no/modification/dataverse_footer.xhtml
index d8b625b..087ae50 100644
--- a/distros/dataverse.no/modification/dataverse_footer.xhtml
+++ b/distros/dataverse.no/modification/dataverse_footer.xhtml
@@ -1,87 +1,178 @@
-
-