Upgrade Dataverse v5.14 to v6.0
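The script below automates the Dataverse v5.14 to v6.0 upgrade on a host where Payara 5 lives under /usr/local/payara5 and Solr runs as a systemd service. A minimal usage sketch (the filename is illustrative, and the log redirect is optional): edit the globals first (DOMAIN, DATAVERSE_FILE_DIRECTORY, MAIL_HOST, MAIL_USER, MAIL_FROM_ADDRESS), then run it as a regular sudo-capable user, not as root.

# Hypothetical filename; use whatever name you saved this gist under.
chmod +x upgrade_dataverse_v5.14_to_v6.0.sh
./upgrade_dataverse_v5.14_to_v6.0.sh 2>&1 | tee "upgrade_$(date +%F).log"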
#!/bin/bash
# Globals
DOMAIN="dataverse-clone.mse.jhu.edu"
PAYARA_OLD="/usr/local/payara5"
PAYARA_NEW="/usr/local/payara6"
JAVA_UPGRADE_SCRIPT_URL="https://gist.github.com/DonRichards/cb992523a5ec588f1fb978d752d0d030/raw/upgrade_java.sh"
PAYARA_ZIP_URL="https://nexus.payara.fish/repository/payara-community/fish/payara/distributions/payara/6.2023.8/payara-6.2023.8.zip"
DATAVERSE_WAR_URL="https://github.com/IQSS/dataverse/releases/download/v6.0/dataverse-6.0.war"
SOLR_TAR_URL="https://archive.apache.org/dist/solr/solr/9.3.0/solr-9.3.0.tgz"
DVINSTALL_ZIP_URL="https://github.com/IQSS/dataverse/releases/download/v6.0/dvinstall.zip"
DATAVERSE_USER="dataverse"
SOLR_USER="solr"
CURRENT_VERSION="5.14"
TARGET_VERSION="6.0"
PAYARA_SERVICE_FILE="$(systemctl show -p FragmentPath payara.service | cut -d'=' -f2)"
if [[ -z "$PAYARA_SERVICE_FILE" ]]; then | |
printf " - Error: payara.service file path not found.\n" >&2 | |
return 1 | |
fi | |
SOLR_SERVICE_FILE="$(systemctl show -p FragmentPath solr.service | cut -d'=' -f2)" | |
if [[ -z "$SOLR_SERVICE_FILE" ]]; then | |
printf " - Error: solr.service file path not found.\n" >&2 | |
return 1 | |
fi | |
COUNTER_DAILY_SCRIPT="/etc/cron.daily/counter_daily.sh"
COUNTER_PROCESSOR_DIR="/usr/local/counter-processor-0.1.04"
DATAVERSE_FILE_DIRECTORY="/mnt/dvn/dv-content"
MAIL_HOST="your_smtp_server"
MAIL_USER="your_mail_user"
MAIL_FROM_ADDRESS="your_from_address"

# Ensure the script is not run as root
if [[ $EUID -eq 0 ]]; then
    printf "Please do not run this script as root.\n" >&2
    printf "This script runs several commands with sudo from within functions.\n" >&2
    exit 1
fi
# Function to undeploy the current Dataverse version
undeploy_dataverse() {
    echo " - Undeploying current Dataverse version..."
    if sudo -u "$DATAVERSE_USER" "$PAYARA_OLD/bin/asadmin" list-applications | grep -q "dataverse-$CURRENT_VERSION"; then
        if ! sudo -u "$DATAVERSE_USER" "$PAYARA_OLD/bin/asadmin" undeploy "dataverse-$CURRENT_VERSION"; then
            echo " - Error undeploying Dataverse $CURRENT_VERSION." >&2
            return 1
        fi
    else
        echo " - Dataverse $CURRENT_VERSION is not currently deployed. Skipping undeploy step."
    fi
}

# Function to stop Payara
stop_payara() {
    local payara_path="$1"
    echo " - Stopping Payara at $payara_path..."
    if pgrep -f "payara" > /dev/null; then
        if ! sudo -u "$DATAVERSE_USER" "$payara_path/bin/asadmin" stop-domain; then
            echo " - Error stopping Payara." >&2
            return 1
        fi
    else
        echo " - Payara is already stopped."
    fi
}

# Function to stop Solr
stop_solr() {
    echo " - Stopping Solr service..."
    if pgrep -f "solr" > /dev/null; then
        if ! sudo systemctl stop solr; then
            echo " - Error stopping Solr." >&2
            return 1
        fi
    else
        echo " - Solr is already stopped."
    fi
}
# Wait for Payara to come back up.
wait_for_site() {
    local url="https://${DOMAIN}/dataverse/root?q="
    local response_code
    printf " - Waiting for site to become available...\n"
    while true; do
        # Get HTTP response code
        response_code=$(curl -o /dev/null -s -w "%{http_code}" "$url")
        if [[ "$response_code" -eq 200 ]]; then
            printf " - Site is up (HTTP 200 OK).\n"
            break
        else
            printf "\r - Waiting... (HTTP response: %s)" "$response_code"
        fi
        # Wait 1 second before checking again
        sleep 1
    done
}
# Function to upgrade Java
upgrade_java() {
    echo " - Upgrading Java..."
    cd /tmp || return 1
    if ! curl -L -O "$JAVA_UPGRADE_SCRIPT_URL"; then
        echo " - Error downloading Java upgrade script." >&2
        return 1
    fi
    chmod +x upgrade_java.sh
    if ! ./upgrade_java.sh; then
        echo " - Error executing Java upgrade script." >&2
        return 1
    fi
    rm -f upgrade_java.sh
}

# Function to download Payara 6
download_payara() {
    echo " - Downloading Payara 6..."
    cd /tmp || return 1
    if ! curl -L -O "$PAYARA_ZIP_URL"; then
        echo " - Error downloading Payara 6." >&2
        return 1
    fi
}

# Function to install Payara 6
install_payara() {
    echo " - Installing Payara 6..."
    if ! sudo unzip -o "payara-6.2023.8.zip" -d /usr/local/; then
        echo " - Error unzipping Payara 6." >&2
        return 1
    fi
    sudo ln -sf /usr/local/payara6 /usr/local/payara
    rm -f "payara-6.2023.8.zip"
}

# Function to configure Payara permissions
configure_payara_permissions() {
    echo " - Configuring Payara permissions..."
    if ! sudo chown -R "$DATAVERSE_USER" /usr/local/payara6; then
        echo " - Error setting ownership for Payara 6." >&2
        return 1
    fi
}
# Function to migrate domain.xml and update configurations
migrate_domain_xml() {
    local script_dir
    script_dir=$(dirname "$(readlink -f "$0")")
    local domain_xml_local="$script_dir/6_0_domain.xml"
    local domain_xml_new="$PAYARA_NEW/glassfish/domains/domain1/config/domain.xml"
    local domain_xml_old="$PAYARA_OLD/glassfish/domains/domain1/config/domain.xml"
    local backup_file="${domain_xml_new}.orig"
    # echo " - Checking for $domain_xml_local..."
    # if [ ! -f "$domain_xml_local" ]; then
    #     echo " - Local domain.xml not found, exiting."
    #     return 1
    # fi
    echo " - Migrating domain.xml and updating configurations..."
    # # Backup the new domain.xml
    # if ! sudo cp "$domain_xml_new" "$backup_file"; then
    #     echo " - Error backing up new domain.xml." >&2
    #     return 1
    # fi
    # Extract the Dataverse- and DOI-related JVM options from the old domain.xml
    local jvm_options
    jvm_options=$(sudo grep -E 'dataverse|doi' "$domain_xml_old" | grep '<jvm-options>' | sed 's/.*<jvm-options>\(.*\)<\/jvm-options>.*/\1/')
    # Use awk to insert each extracted JVM option as its own <jvm-options> element,
    # placed just before the closing </java-config> tag.
    sudo awk -v jvm_options="$jvm_options" '
        /<\/java-config>/ {
            n = split(jvm_options, opts, "\n")
            for (i = 1; i <= n; i++) {
                if (opts[i] != "") print "        <jvm-options>" opts[i] "</jvm-options>"
            }
        }
        { print }
    ' "$domain_xml_new" | sudo tee "$domain_xml_new.tmp" > /dev/null
    if [ $? -ne 0 ]; then
        echo " - Error inserting JVM options into new domain.xml." >&2
        return 1
    fi
    # Move the tmp file to the correct location with sudo
    if ! sudo mv "$domain_xml_new.tmp" "$domain_xml_new"; then
        echo " - Error replacing the domain.xml file." >&2
        return 1
    fi
    # Update file directory paths
    if [ -z "$DATAVERSE_FILE_DIRECTORY" ]; then
        echo " - Error: DATAVERSE_FILE_DIRECTORY is not set." >&2
        return 1
    fi
    # ([^<]* stops at the closing </jvm-options> tag so the XML stays well-formed)
    if ! sudo sed -i "s|-Ddataverse.files.file.directory=[^<]*|-Ddataverse.files.file.directory=$DATAVERSE_FILE_DIRECTORY|" "$domain_xml_new"; then
        echo " - Error updating file directory path in domain.xml." >&2
        return 1
    fi
    echo " - domain.xml successfully migrated and updated."
}
# Function to migrate jhove files
migrate_jhove_files() {
    echo " - Migrating jhove configuration files..."
    local config_dir_old="$PAYARA_OLD/glassfish/domains/domain1/config"
    local config_dir_new="$PAYARA_NEW/glassfish/domains/domain1/config"
    if ! sudo rsync -av "$config_dir_old"/jhove* "$config_dir_new/"; then
        printf " - Error migrating jhove files.\n" >&2
        return 1
    fi
    # Update jhove.conf to reference payara6
    if ! sudo sed -i 's|payara5|payara6|' "$config_dir_new/jhove.conf"; then
        echo " - Error updating jhove.conf." >&2
        return 1
    fi
    if ! sudo chown -R "$DATAVERSE_USER" "$config_dir_new"/jhove*; then
        echo " - Error setting ownership for jhove files." >&2
        return 1
    fi
}

# Function to migrate logos
migrate_logos() {
    echo " - Migrating logos..."
    local docroot_old="$PAYARA_OLD/glassfish/domains/domain1/docroot"
    local docroot_new="$PAYARA_NEW/glassfish/domains/domain1/docroot"
    if ! sudo -u "$DATAVERSE_USER" rsync -av "$docroot_old/logos/" "$docroot_new/logos/"; then
        echo " - Error migrating logos." >&2
        return 1
    fi
}

# Function to migrate MDC logs
migrate_mdc_logs() {
    echo " - Migrating MDC logs..."
    local logs_old="$PAYARA_OLD/glassfish/domains/domain1/logs/mdc"
    local logs_new="$PAYARA_NEW/glassfish/domains/domain1/logs/mdc"
    if ! sudo rsync -av "$logs_old" "$logs_new"; then
        echo " - Error migrating MDC logs." >&2
        return 1
    fi
}
# Function to update cron jobs and counter processor paths
update_cron_jobs() {
    echo " - Updating cron jobs and counter processor paths..."
    if ! sudo sed -i 's|payara5|payara6|' "$COUNTER_DAILY_SCRIPT"; then
        echo " - Error updating $COUNTER_DAILY_SCRIPT." >&2
        return 1
    fi
    if ! sudo find "$COUNTER_PROCESSOR_DIR" -type f -exec sed -i 's|/payara5/|/payara6/|g' {} +; then
        echo " - Error updating counter processor paths." >&2
        return 1
    fi
}

# Function to update Payara service
update_payara_service() {
    local backup_file="$PAYARA_SERVICE_FILE.bak"
    printf " - Updating Payara service...\n"
    # Validate the service file exists
    if [[ ! -f "$PAYARA_SERVICE_FILE" ]]; then
        printf " - Service file not found: %s\n" "$PAYARA_SERVICE_FILE" >&2
        return 1
    fi
    # Create a backup of the service file
    if ! sudo cp "$PAYARA_SERVICE_FILE" "$backup_file"; then
        printf " - Error creating backup of %s.\n" "$PAYARA_SERVICE_FILE" >&2
        return 1
    fi
    # Perform the sed replacement
    if ! sudo sed -i 's|payara5|payara6|' "$PAYARA_SERVICE_FILE"; then
        printf " - Error updating %s.\n" "$PAYARA_SERVICE_FILE" >&2
        return 1
    fi
    # Reload systemd and restart the service with proper error handling
    if ! sudo systemctl daemon-reload; then
        printf " - Error reloading systemd daemon.\n" >&2
        return 1
    fi
    if ! sudo systemctl stop payara; then
        printf " - Error stopping Payara service.\n" >&2
        return 1
    fi
    if ! sudo systemctl start payara; then
        printf " - Error starting Payara service.\n" >&2
        return 1
    fi
    printf " - Payara service updated and restarted successfully.\n"
    return 0
}
# Function to start Payara 6
start_payara6() {
    echo " - Starting Payara 6..."
    if ! sudo systemctl start payara; then
        echo " - Error starting Payara 6." >&2
        return 1
    fi
}

# Function to create JavaMail resource
create_javamail_resource() {
    echo " - Creating JavaMail resource..."
    local output
    if ! output=$(sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" create-javamail-resource --mailhost "$MAIL_HOST" --mailuser "$MAIL_USER" --fromaddress "$MAIL_FROM_ADDRESS" mail/notifyMailSession 2>&1); then
        if [[ "$output" == *"already exists with resource-ref"* ]]; then
            echo " - JavaMail resource already exists. Proceeding..."
        else
            echo " - Error creating JavaMail resource: $output" >&2
            return 1
        fi
    fi
}
# Function to create password aliases
create_password_aliases() {
    echo " - Checking existing password aliases..."
    # Check for existing aliases
    local alias_list
    alias_list=$(sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" list-password-aliases)
    # Flags to track whether we need to create aliases
    local db_alias_exists=false
    local doi_alias_exists=false
    # Check if dataverse.db.password alias exists
    if echo "$alias_list" | grep -q "dataverse.db.password"; then
        echo " - Database password alias 'dataverse.db.password' already exists."
        db_alias_exists=true
    fi
    # Check if doi_password_alias alias exists
    if echo "$alias_list" | grep -q "doi_password_alias"; then
        echo " - DOI password alias 'doi_password_alias' already exists."
        doi_alias_exists=true
    fi
    # If both aliases exist, no need to proceed further
    if $db_alias_exists && $doi_alias_exists; then
        echo " - Both password aliases already exist. No need to create new ones."
        return 0
    fi
    # Create the database password alias if it is missing; otherwise update it
    if ! $db_alias_exists; then
        read -s -p "Enter database password: " DB_PASSWORD
        echo
        if [[ -z "$DB_PASSWORD" ]]; then
            echo " - Error: Database password cannot be blank." >&2
            return 1
        fi
        # Write to temporary file and create alias
        echo "AS_ADMIN_ALIASPASSWORD=$DB_PASSWORD" > /tmp/dataverse.db.password.txt
        if ! sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" create-password-alias dataverse.db.password --passwordfile /tmp/dataverse.db.password.txt; then
            echo " - Error creating database password alias." >&2
            rm -f /tmp/dataverse.db.password.txt
            return 1
        fi
        rm -f /tmp/dataverse.db.password.txt
        echo " - Database password alias 'dataverse.db.password' created successfully."
    else
        if ! sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" update-password-alias dataverse.db.password; then
            echo " - Error updating database password alias." >&2
            return 1
        fi
        echo " - Database password alias 'dataverse.db.password' updated successfully."
    fi
    # Create missing DOI password alias (optional)
    if ! $doi_alias_exists; then
        read -s -p "Enter DOI password (if applicable, or press Enter to skip): " DOI_PASSWORD
        echo
        if [[ -n "$DOI_PASSWORD" ]]; then
            echo "AS_ADMIN_ALIASPASSWORD=$DOI_PASSWORD" > /tmp/dataverse.doi.password.txt
            if ! sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" create-password-alias doi_password_alias --passwordfile /tmp/dataverse.doi.password.txt; then
                echo " - Error creating DOI password alias." >&2
                rm -f /tmp/dataverse.doi.password.txt
                return 1
            fi
            rm -f /tmp/dataverse.doi.password.txt
            echo " - DOI password alias 'doi_password_alias' created successfully."
        else
            echo " - Skipping DOI password alias creation."
        fi
    fi
    # Final check: the database alias is required, the DOI alias is optional
    alias_list=$(sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" list-password-aliases)
    if ! echo "$alias_list" | grep -q "dataverse.db.password"; then
        echo " - Error: Database password alias was not created." >&2
        return 1
    fi
    if ! echo "$alias_list" | grep -q "doi_password_alias"; then
        echo " - Warning: DOI password alias is not set (skipped or not applicable)."
    fi
    echo " - Password alias setup complete."
}
# Function to create JVM options and restart Payara
create_jvm_options() {
    echo " - Creating JVM options..."
    local output
    if ! output=$(sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" create-jvm-options --add-opens=java.base/java.io=ALL-UNNAMED 2>&1); then
        if [[ "$output" == *"already exists in the configuration"* ]]; then
            echo " - JVM option already exists. Proceeding..."
        else
            echo " - Error creating JVM options: $output" >&2
            return 1
        fi
    fi
    echo " - Restarting Payara 6..."
    if ! sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" stop-domain; then
        echo " - Error stopping Payara 6." >&2
        return 1
    fi
    if ! sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" start-domain; then
        echo " - Error starting Payara 6." >&2
        return 1
    fi
}

# Function to create network listener
create_network_listener() {
    echo " - Creating network listener..."
    local output
    if ! output=$(sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" create-network-listener --protocol http-listener-1 --listenerport 8009 --jkenabled true jk-connector 2>&1); then
        if [[ "$output" == *"already exists"* ]]; then
            echo " - Network listener 'jk-connector' already exists. Proceeding..."
        else
            echo " - Error creating network listener: $output" >&2
            return 1
        fi
    fi
}
# Function to deploy Dataverse
deploy_dataverse() {
    echo " - Deploying Dataverse..."
    cd /tmp || return 1
    if ! wget "$DATAVERSE_WAR_URL"; then
        echo " - Error downloading Dataverse WAR file." >&2
        return 1
    fi
    if ! sudo cp "dataverse-$TARGET_VERSION.war" "/home/$DATAVERSE_USER/"; then
        echo " - Error copying WAR file to /home/$DATAVERSE_USER/." >&2
        rm -f "dataverse-$TARGET_VERSION.war"
        return 1
    fi
    sudo chown "$DATAVERSE_USER:" "/home/$DATAVERSE_USER/dataverse-$TARGET_VERSION.war"
    if ! sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" deploy "/home/$DATAVERSE_USER/dataverse-$TARGET_VERSION.war"; then
        # Fall back to deploying the copy left in /tmp
        if ! sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" deploy "dataverse-$TARGET_VERSION.war"; then
            echo -e " - Error deploying Dataverse WAR file.\n" >&2
            rm -f "dataverse-$TARGET_VERSION.war"
            return 1
        fi
    fi
    rm -f "dataverse-$TARGET_VERSION.war"
}

# Function to check Dataverse version
check_dataverse_version() {
    echo " - Checking Dataverse version..."
    local version
    version=$(curl -s "http://localhost:8080/api/info/version" | grep -oP '\d+\.\d+')
    if [ "$version" == "$TARGET_VERSION" ]; then
        echo " - Dataverse upgraded to version $TARGET_VERSION successfully."
    else
        echo " - Dataverse version check failed. Expected $TARGET_VERSION, got $version." >&2
        return 1
    fi
}
# Function to restart Payara 6
restart_payara6() {
    echo " - Restarting Payara 6..."
    if ! sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" stop-domain; then
        echo " - Error stopping Payara 6." >&2
        return 1
    fi
    if ! sudo -u "$DATAVERSE_USER" "$PAYARA_NEW/bin/asadmin" start-domain; then
        echo " - Error starting Payara 6." >&2
        return 1
    fi
}

# Function to download dvinstall
download_dvinstall() {
    echo " - Downloading dvinstall..."
    cd /tmp || return 1
    if ! wget "$DVINSTALL_ZIP_URL"; then
        echo " - Error downloading dvinstall.zip." >&2
        return 1
    fi
    if ! unzip -o dvinstall.zip; then
        echo " - Error unzipping dvinstall.zip." >&2
        rm -f dvinstall.zip
        return 1
    fi
    rm -f dvinstall.zip
}

# Function to upgrade Solr
upgrade_solr() {
    echo " - Upgrading Solr..."
    if ! sudo mv /usr/local/solr "/usr/local/solr-8.11.1"; then
        echo " - Error moving old Solr directory." >&2
        return 1
    fi
    cd /tmp || return 1
    if ! wget "$SOLR_TAR_URL"; then
        echo " - Error downloading Solr tarball." >&2
        return 1
    fi
    if ! tar xvzf "solr-9.3.0.tgz"; then
        echo " - Error extracting Solr tarball." >&2
        rm -f "solr-9.3.0.tgz"
        return 1
    fi
    if ! sudo mv "solr-9.3.0" "/usr/local/solr-9.3.0"; then
        echo " - Error moving Solr directory." >&2
        rm -rf "solr-9.3.0"
        rm -f "solr-9.3.0.tgz"
        return 1
    fi
    rm -f "solr-9.3.0.tgz"
    if ! sudo ln -sf "/usr/local/solr-9.3.0" /usr/local/solr; then
        echo " - Error creating Solr symlink." >&2
        return 1
    fi
}
# Function to update Solr configurations
update_solr_configs() {
    echo " - Updating Solr configurations..."
    if ! sudo rsync -avz "/usr/local/solr/server/solr/configsets/_default/" "/usr/local/solr/server/solr/collection1"; then
        echo " - Error copying Solr configsets." >&2
        return 1
    fi
    if ! sudo rsync -avz /tmp/dvinstall/schema*.xml "/usr/local/solr/server/solr/collection1/conf/"; then
        echo " - Error copying Solr schema files." >&2
        return 1
    fi
    if ! sudo cp "/tmp/dvinstall/solrconfig.xml" "/usr/local/solr/server/solr/collection1/conf/solrconfig.xml"; then
        echo " - Error copying Solr solrconfig.xml." >&2
        return 1
    fi
}

# Function to update Jetty configuration
update_jetty_config() {
    echo " - Updating Jetty configuration..."
    local jetty_file="/usr/local/solr-9.3.0/server/etc/jetty.xml"
    if ! sudo sed -i 's/\(<Set name="requestHeaderSize">.*default="\)[^"]*\("\)/\1102400\2/' "$jetty_file"; then
        echo " - Error updating Jetty requestHeaderSize." >&2
        return 1
    fi
}

# Function to configure Solr core
configure_solr_core() {
    echo " - Configuring Solr core..."
    if ! sudo touch "/usr/local/solr/server/solr/collection1/core.properties"; then
        echo " - Error creating core.properties." >&2
        return 1
    fi
    echo "name=collection1" | sudo tee "/usr/local/solr/server/solr/collection1/core.properties" > /dev/null
    if ! sudo chown -R "$SOLR_USER:" "/usr/local/solr-9.3.0/"; then
        echo " - Error setting ownership for Solr directories." >&2
        return 1
    fi
}
# Function to update Solr service
# Note: the substitutions below are no-ops when the unit file already points at the
# /usr/local/solr symlink; adjust the left-hand patterns if your solr.service uses
# version-specific paths.
update_solr_service() {
    echo " - Updating Solr service..."
    local solr_service_file="$SOLR_SERVICE_FILE"
    if ! sudo sed -i '/^WorkingDirectory *= */s|/usr/local/solr$|/usr/local/solr|' "$solr_service_file"; then
        echo " - Error updating WorkingDirectory in solr.service." >&2
        return 1
    fi
    if ! sudo sed -i '/^ExecStart *= */s|/usr/local/solr/bin/solr|/usr/local/solr/bin/solr|' "$solr_service_file"; then
        echo " - Error updating ExecStart in solr.service." >&2
        return 1
    fi
    if ! sudo sed -i '/^ExecStop *= */s|/usr/local/solr/bin/solr|/usr/local/solr/bin/solr|' "$solr_service_file"; then
        echo " - Error updating ExecStop in solr.service." >&2
        return 1
    fi
    sudo systemctl daemon-reload
}
# Function to start Solr service
start_solr_service() {
    echo " - Starting Solr service..."
    if ! sudo systemctl start solr; then
        echo " - Error starting Solr service." >&2
        return 1
    fi
    # Verify Solr is running
    if ! curl -s "http://localhost:8983/solr/collection1/schema/fields" > /dev/null; then
        echo " - Error: Solr is not responding as expected." >&2
        return 1
    fi
}

# Function to update Solr schema
update_solr_schema() {
    echo " - Updating Solr schema..."
    cd /tmp || return 1
    if ! wget "https://guides.dataverse.org/en/6.0/_downloads/1158e888bffd60c8a89df32fe90f8181/update-fields.sh"; then
        echo " - Error downloading update-fields.sh." >&2
        return 1
    fi
    sudo chown "$SOLR_USER:" update-fields.sh
    sudo chmod +x update-fields.sh
    if ! sudo yum install -y ed; then
        echo " - Error installing 'ed' editor." >&2
        return 1
    fi
    if ! sudo -u "$SOLR_USER" bash -c 'curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/server/solr/collection1/conf/schema.xml'; then
        echo " - Error updating Solr schema." >&2
        return 1
    fi
    if ! sudo -u "$SOLR_USER" bash -c 'curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=collection1"'; then
        echo " - Error reloading Solr core." >&2
        return 1
    fi
    # jq needs a filter argument; "." simply validates that the API returns JSON.
    if ! curl -s "http://localhost:8080/api/admin/index" | jq . > /dev/null; then
        echo " - Error verifying Solr index." >&2
        return 1
    fi
}
CPU_THRESHOLD=80   # High CPU usage threshold
CHECK_INTERVAL=5   # Seconds between checks

# Function to get current CPU usage
get_cpu_usage() {
    local cpu_idle; cpu_idle=$(top -bn1 | grep "Cpu(s)" | sed "s/.*, *\([0-9.]*\)%* id.*/\1/")
    printf "%.0f\n" "$(echo "100 - $cpu_idle" | bc)"
}

# Main function to monitor CPU usage
monitor_cpu() {
    while :; do
        local timestamp; timestamp=$(date '+%Y-%m-%d %H:%M:%S')
        local current_cpu; current_cpu=$(get_cpu_usage)
        printf "\r[%s] Dataverse is running several update tasks, CPU usage: %s%%" "$timestamp" "$current_cpu"
        if [[ $current_cpu -lt $CPU_THRESHOLD ]]; then
            printf "\n[%s] Task has completed. CPU usage is back to normal.\n" "$timestamp"
            return
        fi
        sleep "$CHECK_INTERVAL"
    done
}
# Main function
main() {
    # Ensure Payara environment variables are set
    export PAYARA="$PAYARA_NEW"

    echo -e "\nStep 1: Undeploy the existing Dataverse version"
    if ! undeploy_dataverse; then
        printf " - Error during undeploy.\n" >&2
        exit 1
    fi

    echo -e "\nStep 2: Stop Payara 5"
    if ! stop_payara "$PAYARA_OLD"; then
        printf " - Error stopping Payara 5.\n" >&2
        exit 1
    fi

    echo -e "\nStep 3: Stop Solr"
    if ! stop_solr; then
        printf " - Error stopping Solr.\n" >&2
        exit 1
    fi

    echo -e "\nStep 4: Upgrade Java"
    if ! upgrade_java; then
        printf " - Error upgrading Java.\n" >&2
        exit 1
    fi

    echo -e "\nStep 5: Download Payara 6"
    if ! download_payara; then
        printf " - Error downloading Payara 6.\n" >&2
        exit 1
    fi

    echo -e "\nStep 6: Install Payara 6"
    if ! install_payara; then
        printf " - Error installing Payara 6.\n" >&2
        exit 1
    fi

    echo -e "\nStep 7: Configure Payara permissions"
    if ! configure_payara_permissions; then
        printf " - Error configuring Payara permissions.\n" >&2
        exit 1
    fi

    echo -e "\nStep 8: Migrate domain.xml"
    if ! migrate_domain_xml; then
        printf " - Error migrating domain.xml.\n" >&2
        exit 1
    fi

    echo -e "\nStep 9: Migrate jhove files"
    if ! migrate_jhove_files; then
        printf " - Error migrating jhove files.\n" >&2
        exit 1
    fi

    echo -e "\nStep 10: Migrate logos"
    if ! migrate_logos; then
        printf " - Error migrating logos.\n" >&2
        exit 1
    fi

    echo -e "\nStep 11: Migrate MDC logs"
    if ! migrate_mdc_logs; then
        printf " - Error migrating MDC logs.\n" >&2
        exit 1
    fi

    echo -e "\nStep 12: Update cron jobs and counter processor paths"
    if ! update_cron_jobs; then
        printf " - Error updating cron jobs.\n" >&2
        exit 1
    fi

    echo -e "\nStep 13: Update Payara service"
    if ! update_payara_service; then
        printf " - Error updating Payara service.\n" >&2
        exit 1
    fi

    echo -e "\nStep 14: Start Payara 6"
    if ! start_payara6; then
        printf " - Error starting Payara 6.\n" >&2
        exit 1
    fi

    echo -e "\nStep 15: Create JavaMail resource"
    if ! create_javamail_resource; then
        printf " - Error creating JavaMail resource.\n" >&2
        exit 1
    fi

    echo -e "\nStep 16: Create password aliases"
    if ! create_password_aliases; then
        printf " - Error creating password aliases.\n" >&2
        exit 1
    fi

    echo -e "\nStep 17: Create JVM options and restart Payara"
    if ! create_jvm_options; then
        printf " - Error creating JVM options.\n" >&2
        exit 1
    fi

    echo -e "\nStep 18: Create network listener"
    if ! create_network_listener; then
        printf " - Error creating network listener.\n" >&2
        exit 1
    fi

    echo -e "\nStep 19: Deploy Dataverse"
    if ! deploy_dataverse; then
        printf " - Error deploying Dataverse.\n" >&2
        exit 1
    fi

    echo -e "\nWait for Payara to come up."
    wait_for_site
echo -e "\nStep 11 2nd part: Migrate MDC logs" | |
echo " - Setting MDC path in config" | |
curl -X PUT -d "$logs_new" http://localhost:8080/api/admin/settings/:MDCLogPath || return 1 | |
echo -e "\nStep 20: Check Dataverse version" | |
if ! check_dataverse_version; then | |
printf " - Error checking Dataverse version.\n" >&2 | |
exit 1 | |
fi | |
echo -e "\nStep 21: Restart Payara 6" | |
if ! restart_payara6; then | |
printf " - Error restarting Payara 6.\n" >&2 | |
exit 1 | |
fi | |
echo -e "\nStep 22: Download dvinstall" | |
if ! download_dvinstall; then | |
printf " - Error downloading dvinstall.\n" >&2 | |
exit 1 | |
fi | |
echo "Solr Step 1: Upgrade Solr" | |
if ! upgrade_solr; then | |
printf " - Error upgrading Solr.\n" >&2 | |
exit 1 | |
fi | |
echo "Solr Step 2: Update Solr configurations" | |
if ! update_solr_configs; then | |
printf " - Error updating Solr configurations.\n" >&2 | |
exit 1 | |
fi | |
echo "Solr Step 3: Update Jetty configuration" | |
if ! update_jetty_config; then | |
printf " - Error updating Jetty configuration.\n" >&2 | |
exit 1 | |
fi | |
echo "Solr Step 4: Configure Solr core" | |
if ! configure_solr_core; then | |
printf " - Error configuring Solr core.\n" >&2 | |
exit 1 | |
fi | |
echo "Solr Step 5: Update Solr service" | |
if ! update_solr_service; then | |
printf " - Error updating Solr service.\n" >&2 | |
exit 1 | |
fi | |
echo "Solr Step 6: Start Solr service" | |
if ! start_solr_service; then | |
printf " - Error starting Solr service.\n" >&2 | |
exit 1 | |
fi | |
echo "Solr Step 7: Update Solr schema" | |
if ! update_solr_schema; then | |
printf " - Error updating Solr schema.\n" >&2 | |
exit 1 | |
fi | |
printf "\n\nUpgrade to Dataverse %s completed successfully.\n\n" "$TARGET_VERSION" | |
    # Not strictly needed, but keeps watching CPU usage until post-upgrade background tasks settle.
    monitor_cpu
}

# Run the main function
main "$@"