Upgrade Dataverse v6.1 to v6.2
#!/bin/bash
# Used release to generate this: https://github.com/IQSS/dataverse/releases/tag/v6.2
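# Usage sketch (illustrative; run as a regular user with sudo rights, NOT as root):
#   bash ./upgrade-dataverse-6.1-to-6.2.sh   # filename is whatever you saved this gist as
# Assumes a standard layout: Payara under /usr/local/payara, Solr under
# /usr/local/solr, and systemd units named "payara" and "solr".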
# Globals
DOMAIN="dataverse-clone.mse.jhu.edu"
PAYARA="/usr/local/payara"
SOLR_SCHEMA_URL="https://raw.githubusercontent.com/IQSS/dataverse/v6.2/conf/solr/9.3.0/schema.xml"
SOLR_SCHEMA_FILE=$(basename "$SOLR_SCHEMA_URL")
DATAVERSE_WAR_URL="https://github.com/IQSS/dataverse/releases/download/v6.2/dataverse-6.2.war"
DATAVERSE_WAR_FILE="/home/dataverse/dataverse-6.2.war"
DATAVERSE_WAR_HASH="d0c8c62025457e35333ec7c9bf896355ffeb3b6823020da5f53599b72f399d2e"
GEOSPATIAL_URL="https://github.com/IQSS/dataverse/releases/download/v6.2/geospatial.tsv"
GEOSPATIAL_FILE="/tmp/geospatial.tsv"
CITATION_URL="https://github.com/IQSS/dataverse/releases/download/v6.2/citation.tsv"
CITATION_FILE="/tmp/citation.tsv"
ASTROPHYSICS_URL="https://github.com/IQSS/dataverse/releases/download/v6.2/astrophysics.tsv"
ASTROPHYSICS_FILE="/tmp/astrophysics.tsv"
BIOMEDICAL_URL="https://github.com/IQSS/dataverse/releases/download/v6.2/biomedical.tsv"
BIOMEDICAL_FILE="/tmp/biomedical.tsv"
SOLR_FIELD_UPDATER_URL="https://raw.githubusercontent.com/IQSS/dataverse/refs/tags/v6.2/conf/solr/9.3.0/update-fields.sh"
SOLR_FIELD_UPDATER_FILE="/tmp/update-fields.sh"
DEPLOY_DIR="$PAYARA/glassfish/domains/domain1/generated"
CURRENT_VERSION="6.1"
TARGET_VERSION="6.2"
DATAVERSE_USER="dataverse"
BASHRC_FILE="/home/dataverse/.bashrc"
PAYARA_EXPORT_LINE="export PAYARA=\"$PAYARA\""
RATE_LIMIT_JSON_FILE="rate-limit-actions-setting.json"
# Ensure the script is not run as root
if [[ $EUID -eq 0 ]]; then
printf "Please do not run this script as root.\n" >&2
printf "This script runs several commands with sudo from within functions.\n" >&2
exit 1
fi
check_current_version() {
local version response
response=$(sudo -u dataverse $PAYARA/bin/asadmin list-applications)
# Check if "No applications are deployed to this target server" is part of the response
if [[ "$response" == *"No applications are deployed to this target server"* ]]; then
printf " - No applications are deployed to this target server. Assuming upgrade is needed.\n"
return 0
fi
# If no such message, check the Dataverse version via the API
version=$(curl -s "http://localhost:8080/api/info/version" | grep -oP '\d+\.\d+')
# Check if the version matches the expected current version
if [[ $version == "$CURRENT_VERSION" ]]; then
return 0
else
printf " - Current Dataverse version is not %s. Upgrade cannot proceed.\n" "$CURRENT_VERSION" >&2
return 1
fi
}
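# For reference, a healthy /api/info/version response looks roughly like this
# (the build string varies per install):
#   {"status":"OK","data":{"version":"6.1","build":"..."}}
# The grep -oP '\d+\.\d+' above extracts just the "6.1" from that JSON.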
# Function to undeploy the current Dataverse version
undeploy_dataverse() {
if sudo -u dataverse $PAYARA/bin/asadmin list-applications | grep -q "dataverse-$CURRENT_VERSION"; then
printf " - Undeploying current Dataverse version...\n"
sudo -u dataverse $PAYARA/bin/asadmin undeploy dataverse-$CURRENT_VERSION || return 1
else
printf " - Dataverse is not currently deployed. Skipping undeploy step.\n"
fi
}
# Function to stop Payara service
stop_payara() {
if pgrep -f payara > /dev/null; then
printf " - Stopping Payara service...\n"
sudo systemctl stop payara || return 1
else
printf " - Payara is already stopped.\n"
fi
}
stop_solr() {
if pgrep -f solr > /dev/null; then
printf " - Stopping Solr service...\n"
sudo systemctl stop solr || return 1
else
printf " - Solr is already stopped.\n"
fi
}
start_solr() {
if ! pgrep -f solr > /dev/null; then
printf " - Starting Solr service...\n"
sudo systemctl start solr || return 1
else
printf " - Solr is already running.\n"
fi
}
# Function to start Payara service
start_payara() {
if ! pgrep -f payara > /dev/null; then
printf " - Starting Payara service...\n"
sudo systemctl start payara || return 1
else
printf " - Payara is already running.\n"
fi
}
# Function to clean generated directory
clean_generated_dir() {
if [[ -d "$DEPLOY_DIR" ]]; then
printf " - Removing generated directory...\n"
sudo rm -rf "$DEPLOY_DIR" || return 1
else
printf " - Generated directory already clean. Skipping.\n"
fi
}
download_war_file() {
if [[ -f "$DATAVERSE_WAR_FILE" ]]; then
printf " - WAR file already exists at %s. Skipping download.\n" "$DATAVERSE_WAR_FILE"
ACTUAL_HASH=$(sudo -u dataverse shasum -a 256 "$DATAVERSE_WAR_FILE" | awk '{print $1}')
if [ "$ACTUAL_HASH" != "$DATAVERSE_WAR_HASH" ]; then
echo "Hash mismatch!"
sudo rm -f "$DATAVERSE_WAR_FILE"
else
echo "Hash matches!"
return 0
fi
fi
printf " - WAR file not found or its hash didn't match. Downloading...\n"
sudo rm -f "$DATAVERSE_WAR_FILE"
if ! sudo -u dataverse curl -L -o "$DATAVERSE_WAR_FILE" "$DATAVERSE_WAR_URL"; then
printf " - Error downloading the WAR file.\n" >&2
return 1
fi
printf " - Download completed successfully.\n"
sudo chown dataverse:dataverse "$DATAVERSE_WAR_FILE"
ACTUAL_HASH=$(sudo -u dataverse shasum -a 256 "$DATAVERSE_WAR_FILE" | awk '{print $1}')
if [ "$ACTUAL_HASH" != "$DATAVERSE_WAR_HASH" ]; then
echo "Hash mismatch!"
return 1
else
echo "Hash matches!"
fi
}
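# To sanity-check the WAR out of band, a quick manual verification (the expected
# hash above comes from the v6.2 release assets):
#   shasum -a 256 /home/dataverse/dataverse-6.2.war
# should print d0c8c62025457e35333ec7c9bf896355ffeb3b6823020da5f53599b72f399d2e.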
download_solr_schema_file() {
if [[ -f "$SOLR_SCHEMA_FILE" ]]; then
printf " - SOLR_SCHEMA file already exists at %s. Skipping download.\n" "$SOLR_SCHEMA_FILE"
else
printf " - SOLR_SCHEMA file not found. Downloading...\n"
if curl -L -o "$SOLR_SCHEMA_FILE" "$SOLR_SCHEMA_URL"; then
printf " - SOLR_SCHEMA file downloaded successfully to %s\n" "$SOLR_SCHEMA_FILE"
else
printf " - Error downloading the SOLR_SCHEMA file. Exiting script.\n"
return 1
fi
fi
}
update_solr_schema_file() {
if [[ -f "$SOLR_SCHEMA_FILE" ]]; then
printf " - Solr schema file found. Uploading...\n"
sudo cp /usr/local/solr/server/solr/collection1/conf/schema.xml /usr/local/solr/server/solr/collection1/conf/schema.xml_$(date +"%Y%m%d") || return 1
sudo chown solr:solr $SOLR_SCHEMA_FILE || return 1
if ! sudo cp $SOLR_SCHEMA_FILE /usr/local/solr/server/solr/collection1/conf/schema.xml ; then
printf " - Error copying with the Solr schema file.\n" >&2
return 1
fi
printf " - Update completed successfully.\n"
sudo rm -f $SOLR_SCHEMA_FILE
else
printf " - Solr schema file is missing at %s.\n" "$SOLR_SCHEMA_FILE"
return 1
fi
}
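# Rollback sketch: the function above keeps a dated backup, so if the new schema
# misbehaves you can restore it (YYYYMMDD is whatever day the upgrade ran):
#   sudo cp /usr/local/solr/server/solr/collection1/conf/schema.xml_YYYYMMDD \
#     /usr/local/solr/server/solr/collection1/conf/schema.xml
#   sudo systemctl restart solr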
download_geospatial_file() {
if [[ -f "$GEOSPATIAL_FILE" ]]; then
printf " - Geospatial file already exists at %s. Skipping download.\n" "$GEOSPATIAL_FILE"
else
printf " - Geospatial file not found. Downloading...\n"
if sudo -u dataverse bash -c "curl -L -o \"$GEOSPATIAL_FILE\" \"$GEOSPATIAL_URL\""; then
printf " - Geospatial file downloaded successfully to %s\n" "$GEOSPATIAL_FILE"
else
printf " - Error downloading the Geospatial file. Exiting script.\n" >&2
return 1
fi
fi
}
update_geospatial_metadata_block() {
if [[ -f "$GEOSPATIAL_FILE" ]]; then
printf " - Geospatial file found. Uploading...\n"
# Capture the curl output
local response
response=$(sudo -u dataverse curl -s http://localhost:8080/api/admin/datasetfield/load \
-H "Content-type: text/tab-separated-values" \
-X POST --upload-file "$GEOSPATIAL_FILE")
local curl_status=$?
# Check if "The requested resource is not available" is in the response
if echo "$response" | grep -q "The requested resource is not available"; then
printf " - Error: The requested resource is not available.\n" >&2
return 1
fi
# Check whether curl itself failed ($? at this point would only reflect the grep above)
if [[ $curl_status -ne 0 ]]; then
printf " - Error updating with the Geospatial file.\n" >&2
return 1
fi
printf " - Update completed successfully.\n"
sudo rm -f "$GEOSPATIAL_FILE"
else
printf " - Geospatial file is missing at %s.\n" "$GEOSPATIAL_FILE"
return 1
fi
}
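# On success, the datasetfield/load endpoint returns JSON listing what changed,
# roughly of this shape (entries here are illustrative, not an exact contract):
#   {"status":"OK","data":{"added":[...],"updated":[{"name":"geospatial", ...}]}}
# The same applies to the citation, astrophysics, and biomedical loads below.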
download_citation_file() {
if [[ -f "$CITATION_FILE" ]]; then
printf " - Citation file already exists at %s. Skipping download.\n" "$CITATION_FILE"
else
printf " - Citation file not found. Downloading...\n"
if sudo -u dataverse bash -c "curl -L -o \"$CITATION_FILE\" \"$CITATION_URL\""; then
printf " - Citation file downloaded successfully to %s\n" "$CITATION_FILE"
else
printf " - Error downloading the Citation file. Exiting script.\n"
return 1
fi
fi
}
update_citation_metadata_block() {
if [[ -f "$CITATION_FILE" ]]; then
printf " - Citation file found. Uploading...\n"
# Capture the curl output
local response
response=$(sudo -u dataverse curl -s http://localhost:8080/api/admin/datasetfield/load \
-H "Content-type: text/tab-separated-values" \
-X POST --upload-file "$CITATION_FILE")
local curl_status=$?
# Check if "The requested resource is not available" is in the response
if echo "$response" | grep -q "The requested resource is not available"; then
printf " - Error: The requested resource is not available.\n" >&2
return 1
fi
# Check whether curl itself failed ($? at this point would only reflect the grep above)
if [[ $curl_status -ne 0 ]]; then
printf " - Error updating with the Citation file.\n" >&2
return 1
fi
printf " - Update completed successfully.\n"
sudo rm -f "$CITATION_FILE"
else
printf " - Citation file is missing at %s.\n" "$CITATION_FILE"
return 1
fi
}
download_astrophysics_file() {
if [[ -f "$ASTROPHYSICS_FILE" ]]; then
printf " - ASTROPHYSICS file already exists at %s. Skipping download.\n" "$ASTROPHYSICS_FILE"
else
printf " - ASTROPHYSICS file not found. Downloading...\n"
if sudo -u dataverse bash -c "curl -L -o \"$ASTROPHYSICS_FILE\" \"$ASTROPHYSICS_URL\""; then
printf " - ASTROPHYSICS file downloaded successfully to %s\n" "$ASTROPHYSICS_FILE"
else
printf " - Error downloading the ASTROPHYSICS file. Exiting script.\n"
return 1
fi
fi
}
update_astrophysics_metadata_block() {
if [[ -f "$ASTROPHYSICS_FILE" ]]; then
printf " - ASTROPHYSICS file found. Uploading...\n"
# Capture the curl output
local response
response=$(sudo -u dataverse curl -s http://localhost:8080/api/admin/datasetfield/load \
-H "Content-type: text/tab-separated-values" \
-X POST --upload-file "$ASTROPHYSICS_FILE")
local curl_status=$?
# Check if "The requested resource is not available" is in the response
if echo "$response" | grep -q "The requested resource is not available"; then
printf " - Error: The requested resource is not available.\n" >&2
return 1
fi
# Check whether curl itself failed ($? at this point would only reflect the grep above)
if [[ $curl_status -ne 0 ]]; then
printf " - Error updating with the ASTROPHYSICS file.\n" >&2
return 1
fi
printf " - Update completed successfully.\n"
sudo rm -f "$ASTROPHYSICS_FILE"
else
printf " - ASTROPHYSICS file is missing at %s.\n" "$ASTROPHYSICS_FILE"
return 1
fi
}
download_biomedical_file() {
if [[ -f "$BIOMEDICAL_FILE" ]]; then
printf " - BIOMEDICAL file already exists at %s. Skipping download.\n" "$BIOMEDICAL_FILE"
else
printf " - BIOMEDICAL file not found. Downloading...\n"
if sudo -u dataverse bash -c "curl -L -o \"$BIOMEDICAL_FILE\" \"$BIOMEDICAL_URL\""; then
printf " - BIOMEDICAL file downloaded successfully to %s\n" "$BIOMEDICAL_FILE"
else
printf " - Error downloading the BIOMEDICAL file. Exiting script.\n"
return 1
fi
fi
}
update_biomedical_metadata_block() {
if [[ -f "$BIOMEDICAL_FILE" ]]; then
printf " - BIOMEDICAL file found. Uploading...\n"
# Capture the curl output
local response
response=$(sudo -u dataverse curl -s http://localhost:8080/api/admin/datasetfield/load \
-H "Content-type: text/tab-separated-values" \
-X POST --upload-file "$BIOMEDICAL_FILE")
local curl_status=$?
# Check if "The requested resource is not available" is in the response
if echo "$response" | grep -q "The requested resource is not available"; then
printf " - Error: The requested resource is not available.\n" >&2
return 1
fi
# Check whether curl itself failed ($? at this point would only reflect the grep above)
if [[ $curl_status -ne 0 ]]; then
printf " - Error updating with the BIOMEDICAL file.\n" >&2
return 1
fi
printf " - Update completed successfully.\n"
sudo rm -f "$BIOMEDICAL_FILE"
else
printf " - BIOMEDICAL file is missing at %s.\n" "$BIOMEDICAL_FILE"
return 1
fi
}
# Download and update Solr schema updater
download_solr_schema_updater() {
if [[ -f "$SOLR_FIELD_UPDATER_FILE" ]]; then
printf " - Solr Field Updater file already exists at %s. Skipping download.\n" "$SOLR_FIELD_UPDATER_FILE"
else
printf " - Solr Field Updater file not found. Downloading...\n"
if sudo -u solr bash -c "curl -L -o \"$SOLR_FIELD_UPDATER_FILE\" \"$SOLR_FIELD_UPDATER_URL\""; then
printf " - Solr Field Updater file downloaded successfully to %s\n" "$SOLR_FIELD_UPDATER_FILE"
else
printf " - Error downloading the Solr Field Updater file. Exiting script.\n"
return 1
fi
fi
if ! sudo chmod +x "$SOLR_FIELD_UPDATER_FILE"; then
printf " - Error running chmod on %s\n" "$SOLR_FIELD_UPDATER_FILE" >&2
return 1
fi
if ! sudo chown solr:solr "$SOLR_FIELD_UPDATER_FILE"; then
printf " - Error running chown on %s\n" "$SOLR_FIELD_UPDATER_FILE" >&2
return 1
fi
}
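# update-fields.sh reads a list of Solr <field>/<copyField> definitions on stdin
# and splices it into the schema file between its SCHEMA-FIELDS markers; the
# Dataverse endpoint used below emits that list for all installed metadata blocks.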
update_solr_schema_updater() {
if [[ -f "$SOLR_FIELD_UPDATER_FILE" ]]; then
printf " - Solr file found. Uploading...\n"
if ! sudo -u solr bash -c "curl http://localhost:8080/api/admin/index/solr/schema | bash $SOLR_FIELD_UPDATER_FILE /usr/local/solr/server/solr/collection1/conf/schema.xml" ; then
printf " - Error updating with the Solr fields from update-fields script.\n" >&2
return 1
fi
printf " - Update completed successfully.\n"
sudo rm -f $SOLR_FIELD_UPDATER_FILE
else
printf " - Solr file is missing at %s.\n" "$SOLR_FIELD_UPDATER_FILE"
return 1
fi
# New field that caused problems: the Software Description "license" field.
# It is a dropdown controlled-vocabulary list.
# Add the following line to the schema.xml file if it is missing
# (the "<!-- JHU-managed fieldNames -->" block isn't copied over by update-fields.sh):
# <field name="license" type="string" indexed="true" stored="true" multiValued="false" />
if ! grep -q '<field name="license"' '/usr/local/solr/server/solr/collection1/conf/schema.xml'; then
sudo sed -i '/<!-- SCHEMA-FIELDS::END -->/a <field name="license" type="string" indexed="true" stored="true" multiValued="false" />' /usr/local/solr/server/solr/collection1/conf/schema.xml
fi
# curl "http://localhost:8983/solr/admin/cores?action=STATUS"
}
deploy_new_version() {
if ! sudo -u dataverse $PAYARA/bin/asadmin list-applications | grep -q "dataverse-$TARGET_VERSION"; then
printf " - Deploying new Dataverse version...\n"
sudo -u dataverse $PAYARA/bin/asadmin deploy "$DATAVERSE_WAR_FILE" || return 1
else
printf " - Dataverse version %s is already deployed. Skipping deployment.\n" "$TARGET_VERSION"
fi
}
export_all_metadata() {
sudo -u dataverse curl http://localhost:8080/api/admin/metadata/reExportAll || return 1
}
status_solr() {
while true; do
result=$(sudo -u solr bash -c "curl -s http://localhost:8983/solr/admin/cores?action=STATUS")
# Check for initFailures
init_failure=$(echo "$result" | jq -r '.initFailures.collection1 // empty')
if [[ -n "$init_failure" ]]; then
echo -e "\nError: Solr initialization failure detected:\n$init_failure" >&2
return 1
fi
# Extract current, numDocs, and maxDoc values
current=$(echo "$result" | jq '.status.collection1.index.current')
numDocs=$(echo "$result" | jq '.status.collection1.index.numDocs')
maxDoc=$(echo "$result" | jq '.status.collection1.index.maxDoc')
# Display progress on the same line
printf "\rIndexing progress: numDocs=%s, maxDoc=%s" "$numDocs" "$maxDoc"
# Check if indexing is current
if [[ "$current" == "true" ]]; then
echo -e "\nIndexing complete."
break
fi
sleep 1
done
}
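# The jq filters above assume the usual cores STATUS response shape, approximately:
#   {"initFailures":{},
#    "status":{"collection1":{"index":{"current":true,"numDocs":1234,"maxDoc":1250}}}}
# "current" flips to true once the index reader reflects all committed segments.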
# Waiting for payara to come back up.
wait_for_site() {
local url="https://${DOMAIN}/dataverse/root?q="
local response_code
printf " - Waiting for site to become available...\n"
while true; do
# Get HTTP response code
response_code=$(curl -o /dev/null -s -w "%{http_code}" "$url")
if [[ "$response_code" -eq 200 ]]; then
printf " - Site is up (HTTP 200 OK).\n"
break
else
printf "\r - Waiting... (HTTP response: %s)" "$response_code"
fi
# Wait 1 second before checking again
sleep 1
done
}
reindex_solr() {
# Call Solr for status
status_solr
sudo -u dataverse curl -X DELETE http://localhost:8080/api/admin/index/timestamps || return 1
sudo -u dataverse curl http://localhost:8080/api/admin/index/continue || return 1
sudo -u dataverse curl http://localhost:8080/api/admin/index/status || return 1
echo " - Waiting for solr to complete it's reindex"
status_solr
}
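# Deleting the index timestamps and calling /index/continue makes Dataverse
# re-index every object whose timestamp is now missing; /index/status reports
# progress. A spot check once things settle (illustrative):
#   curl -s "http://localhost:8983/solr/collection1/select?q=*:*&rows=0" | jq '.response.numFound'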
set_rate_limit() {
# Define the JSON file and the API endpoint
API_ENDPOINT="http://localhost:8080/api/admin/settings/:RateLimitingCapacityByTierAndAction"
# Ensure the JSON file exists
if [[ ! -f "$RATE_LIMIT_JSON_FILE" ]]; then
printf " - No JSON file %s not found.\n" "$RATE_LIMIT_JSON_FILE" >&2
printf " - For more info: https://guides.dataverse.org/en/6.2/installation/config.html#rate-limiting\n"
else
# Use curl to send the contents of the JSON file with a PUT request
curl "$API_ENDPOINT" -X PUT -d @"$RATE_LIMIT_JSON_FILE" -H "Content-Type: application/json" | jq || return 1
printf " - Request sent using JSON file: %s\n" "$RATE_LIMIT_JSON_FILE"
fi
}
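# The JSON file is not shipped with this script. A minimal sketch of the expected
# format per the rate-limiting docs linked above (tier, hourly capacity, and the
# command names to limit; the action names here are illustrative):
#   [
#     {"tier": 0, "limitPerHour": 10, "actions": ["GetLatestPublishedDatasetVersionCommand"]},
#     {"tier": 1, "limitPerHour": 30, "actions": ["CreateGuestbookResponseCommand"]}
#   ]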
update_set_permalink() {
# Configure the perma1 PermaLink PID provider.
# Note: the type is left as FAKE here; switch it to datacite for a production setup.
# https://guides.dataverse.org/en/6.2/installation/config.html#dataverse-pid-type
local commands=(
'-Ddataverse.pid.perma1.type=FAKE'
'-Ddataverse.pid.perma1.label=PermaLink'
'-Ddataverse.pid.perma1.authority=10.7281'
'-Ddataverse.pid.perma1.shoulder=T1'
'-Ddataverse.pid.perma1.permalink.base-url=https\://dataverse-clone.mse.jhu.edu'
'-Ddataverse.pid.perma1.permalink.separator=\/'
'-Ddataverse.pid.perma1.permalink.identifier-generation-style=randomString'
'-Ddataverse.pid.default-provider=perma1'
'-Ddataverse.pid.providers=perma1'
)
local cmd
if ! sudo -u dataverse /usr/local/payara/bin/asadmin list-jvm-options | grep -qF "Ddataverse.pid.providers=perma1"; then
for cmd in "${commands[@]}"; do
if ! sudo -u dataverse /usr/local/payara/bin/asadmin create-jvm-options "$cmd"; then
printf "Error: Command failed -> %s\n" "$cmd" >&2
return 1
fi
done
printf "All commands executed successfully.\n"
else
printf "Permalinks already set, nothing to do.\n"
fi
}
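# To confirm the options took effect (illustrative check; Payara is restarted
# later in main(), which is required before the new PID config is live):
#   sudo -u dataverse $PAYARA/bin/asadmin list-jvm-options | grep dataverse.pid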
replace_doi_with_DOI() {
local file="/usr/local/payara6/glassfish/domains/domain1/applications/dataverse-6.2/dataset.xhtml"
local target_line=595
# Ensure the file exists
if [[ ! -f "$file" ]]; then
printf "Error: File %s not found.\n" "$file" >&2
return 1
fi
# Replace 'doi' with 'DOI' only on the target line using sed
sudo sed -i "${target_line}s/DatasetPage\.doi/DatasetPage\.DOI/" "$file"
# Verify if the replacement was successful
if ! grep -q 'DatasetPage\.DOI' "$file"; then
printf "Error: Replacement failed.\n" >&2
return 1
fi
printf "Replacement successful in file: %s\n" "$file"
}
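# Note: this patches the exploded WAR in place, so redeploying the application
# undoes it. A quick check after any redeploy (illustrative):
#   grep -n 'DatasetPage\.DOI' \
#     /usr/local/payara6/glassfish/domains/domain1/applications/dataverse-6.2/dataset.xhtml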
main() {
echo "Pre-req: ensure Payara environment variables are set"
export PAYARA="$PAYARA"
echo "Checking $BASHRC_FILE for payara export"
sleep 2
if ! sudo -u dataverse grep -qF "$PAYARA_EXPORT_LINE" "$BASHRC_FILE"; then
printf " - Line not found in .bashrc. Adding it...\n"
sudo bash -c "echo -e '\n$PAYARA_EXPORT_LINE' >> $BASHRC_FILE"
printf " - Line added to .bashrc.\n"
else
printf " - Line already exists in .bashrc. Skipping addition.\n"
fi
echo -e "\nCheck if Dataverse is running the correct version"
sleep 2
if ! check_current_version; then
printf " - Failed to find $CURRENT_VERSION deployed.\n" >&2
exit 1
fi
echo -e "\nStep 1: Update Solr schema.xml"
printf " - Stopping Solr.\n"
if ! stop_solr; then
printf " - Step 1: Error stopping Solr.\n" >&2
exit 1
fi
sleep 2
echo -e " - Downloading the Solr schema files"
if ! download_solr_schema_file; then
printf " - Step 1: Failed to download Solr's schema. Exiting script.\n" >&2
exit 1
fi
echo -e " - Updating the Solr schema files"
if ! update_solr_schema_file; then
printf " - Step 1: Failed to copy Solr's schema to solr. Exiting script.\n" >&2
exit 1
fi
sleep 2
printf " - Starting Solr.\n"
if ! start_solr; then
printf " - Step 1: Error starting Solr.\n" >&2
exit 1
fi
echo -e "\nStep 2: Undeploy the existing version"
sleep 2
if ! undeploy_dataverse; then
printf " - Step 2: Error during undeploy.\n" >&2
exit 1
fi
echo -e "\nStep 3: Stop Payara and clean directories"
sleep 2
if ! stop_payara || ! clean_generated_dir; then
printf " - Step 3: Error stopping Payara or cleaning generated directories.\n" >&2
exit 1
fi
echo -e "\nStep 4: Start Payara and deploy the new version"
sleep 2
if ! start_payara; then
printf " - Step 4: Error starting Payara.\n" >&2
exit 1
fi
echo -e "\nStep 5: Download WAR file."
sleep 2
if ! download_war_file; then
printf " - Step 5: Failed to download WAR file. Exiting script.\n" >&2
exit 1
fi
echo -e "\nStep 5: Deploying WAR file."
if ! deploy_new_version; then
printf " - Step 5: Error deploying new version.\n" >&2
exit 1
fi
echo -e "\nWait for Payara to come up."
wait_for_site
echo -e "\nStep 6:For installations with internationalization: Please remember to update translations via Dataverse language packs."
echo -e " - Step 6: Skipped"
# Files are automatically pulled into /usr/local/payara6/glassfish/domains/domain1/applications/dataverse-6.2/WEB-INF/classes/propertyFiles/
echo -e "\nStep 7: Restart Payara"
sleep 2
if ! stop_payara || ! start_payara; then
printf " - Step 7: Error restarting Payara after deployment.\n" >&2
exit 1
fi
echo -e "\nWait for Payara to come up."
wait_for_site
echo -e "\nStep 8: Update the following Metadata Blocks to reflect the incremental improvements made to the handling of core metadata fields:"
echo -e "Step 8: Update Geospatial Metadata Block"
sleep 2
if ! download_geospatial_file; then
printf " - Step 8: Failed to download geospatial file. Exiting script.\n" >&2
exit 1
fi
if ! update_geospatial_metadata_block; then
printf " - Step 8: Failed to update geospatial metadata block. Exiting script.\n" >&2
exit 1
fi
echo -e "\nStep 8: Update Citation Metadata Block"
sleep 2
if ! download_citation_file; then
printf " - Step 8: Failed to download citation file. Exiting script.\n" >&2
exit 1
fi
if ! update_citation_metadata_block; then
printf " - Step 8: Failed to update citation metadata block. Exiting script.\n" >&2
exit 1
fi
echo -e "\nStep 8: Update atrophysics Metadata Block"
sleep 2
if ! download_astrophysics_file; then
printf " - Step 8: Failed to download atrophysics file. Exiting script.\n" >&2
exit 1
fi
if ! update_astrophysics_metadata_block; then
printf " - Step 8: Failed to update atrophysics metadata block. Exiting script.\n" >&2
exit 1
fi
echo -e "\nStep 8: Update biomedical Metadata Block"
sleep 2
if ! download_biomedical_file; then
printf " - Step 8: Failed to download biomedical file. Exiting script.\n" >&2
exit 1
fi
if ! update_biomedical_metadata_block; then
printf " - Step 8: Failed to update biomedical metadata block. Exiting script.\n" >&2
exit 1
fi
echo -e "\nStep 8: Run ReExportAll to update dataset metadata exports."
sleep 2
if ! export_all_metadata; then
printf " - Step 8: Error exporting all metadata.\n" >&2
exit 1
fi
echo -e "\nStep 9: For installations with custom or experimental metadata blocks:"
if ! stop_solr; then
printf " - Step 9: Error stopping Solr.\n" >&2
exit 1
fi
sleep 2
if ! download_solr_schema_updater; then
printf " - Step 9: Failed to download Solr's schema. Exiting script.\n" >&2
exit 1
fi
if ! update_solr_schema_updater; then
printf " - Step 9: Failed to update Solr schema. Exiting script.\n" >&2
exit 1
fi
sleep 2
printf " - Starting Solr.\n"
if ! start_solr; then
printf " - Step 9: Error starting Solr.\n" >&2
exit 1
fi
echo -e "\n\nStep 9: Run Solr's reindex."
if ! reindex_solr; then
printf " - Step 8: Error reindexing solr.\n" >&2
exit 1
fi
echo -e "\nAdditional Step: set rate limits."
if ! set_rate_limit; then
printf " - Additional Step: Error setting rate limits.\n" >&2
exit 1
fi
echo -e "Additional Step: set permalink."
if ! update_set_permalink; then
printf " - Additional Step: Error setting permlink configs.\n" >&2
exit 1
fi
echo -e "Additional Step: Fix when MDC is displayed"
if ! replace_doi_with_DOI; then
printf " - Additional Step: Error Fix when MDC is displayed #10463.\n" >&2
printf " - IQSS/10462 - https://github.com/IQSS/dataverse/issues/10907" >&2
exit 1
fi
echo -e "Additional Step: Restart Payara"
sleep 2
if ! stop_payara || ! start_payara; then
printf " - Step 7: Error restarting Payara after deployment.\n" >&2
exit 1
fi
sleep 2
printf "\n\nUpgrade to Dataverse %s completed successfully.\n\n" "$TARGET_VERSION"
}
# Run the main function
main "$@"