Telefonica on-prem import script
#!/usr/bin/bash
# Before running the import:
# - create a normal customer installation of docker onprem (with dataservices configured etc)
# - docker-compose down
# - copy password encryption secrets from `app_config.yml` in the source installation and set them in `.env`
#   CARTO_BUILDER_SECRET_TOKEN='value-from-old-onprem'
#   CARTO_BUILDER_SECRET_KEY_BASE='value-from-old-onprem'
#   CARTO_BUILDER_PASSWORD_SECRET='value-from-old-onprem'
#
# MAKE SURE only one CARTO_BUILDER_PASSWORD_SECRET exists in .env; remove the one copied from central
#
# usage: import_onprem.sh <path_to_onprem_data.tgz>
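# Example invocation (the tarball path is just an illustrative placeholder):
#   ./import_onprem.sh /tmp/onprem_data.tgz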
set -ex
## ARGS
PATH_TO_EXPORTED_DATA=$1
if [ -z "$1" ]; then
  echo "path to onprem_data.tgz is required"
  exit 1
fi
## ENV VARS
DOCKER_VOLUMES_PATH=/var/lib/docker/volumes
source .env
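# .env is expected to define COMPOSE_PROJECT_NAME (used below to locate the docker volumes)
# plus the CARTO_BUILDER_* secrets copied from the old installation.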
## get uid/gid for current installation
REDIS_UID=$(stat -c '%u' ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data)
POSTGRESQL_UID=$(stat -c '%u' ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data)
UPLOADS_UID=$(stat -c '%u' ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data)
REDIS_GID=$(stat -c '%g' ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data)
POSTGRESQL_GID=$(stat -c '%g' ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data)
UPLOADS_GID=$(stat -c '%g' ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data)
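# These uid/gid values are reused by the chown calls below so the restored data
# keeps the same ownership as the freshly created volumes.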
# copy exported data and set ownership
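# NOTE: the data restore block below is commented out; uncomment it (including the
# tar extraction) if you need to copy the exported data into the docker volumes.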
#tar -xzvif $PATH_TO_EXPORTED_DATA
#
#
#if [ -d "data/redis" ]; then
#  # tar generated in old onprem 4.2
#  cp -pr ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data.backup
#  rm -rf ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data/*
#  cp -pr data/redis/dump.rdb ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data
#  chown -R $REDIS_UID:$REDIS_GID ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data
#
#  cp -pr ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data.backup
#  rm -rf ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data/*
#  cp -pr data/postgresql/* ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data
#  chown -R $POSTGRESQL_UID:$POSTGRESQL_GID ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data
#
#  cp -pr ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data.backup
#  rm -rf ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data/*
#  cp -pr data/shared/uploads/* ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data
#  chown -R $UPLOADS_UID:$UPLOADS_GID ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data
#
#else
#  # tar generated from docker onprem installation
#  cp -pr ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data.backup
#  rm -rf ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data/*
#  cp -pr var/lib/docker/volumes/${COMPOSE_PROJECT_NAME}_redis/_data/dump.rdb ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data
#  chown -R $REDIS_UID:$REDIS_GID ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_redis/_data
#
#  cp -pr ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data.backup
#  rm -rf ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data/*
#  cp -pr var/lib/docker/volumes/${COMPOSE_PROJECT_NAME}_postgresql/_data/* ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data
#  chown -R $POSTGRESQL_UID:$POSTGRESQL_GID ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_postgresql/_data
#
#  cp -pr ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data.backup
#  rm -rf ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data/*
#  cp -pr var/lib/docker/volumes/${COMPOSE_PROJECT_NAME}_builderuploads/_data/* ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data
#  chown -R $UPLOADS_UID:$UPLOADS_GID ${DOCKER_VOLUMES_PATH}/${COMPOSE_PROJECT_NAME}_builderuploads/_data
#
#fi
# docker-compose up -d
docker-compose up -d
sleep 60
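# Give the containers time to finish booting before running psql/redis-cli against them
# (60s is an arbitrary wait; adjust it if the services need longer to come up).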
# Update users in postgresql and redis
## Update database_host
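# The restored metadata still references the old database host, so point every
# user at the postgresql service of this docker-compose installation.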
docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres cartodb_production -Atc "update users set database_host = 'postgresql'"
USERNAMES=$(docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres cartodb_production -Atc "select username from users")
for username in $USERNAMES; do
  docker exec ${COMPOSE_PROJECT_NAME}_redis_1 redis-cli -n 5 HSET "rails:users:${username}" "database_host" postgresql
done
# Get users databases
USER_DBS=$(docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres -Atc "SELECT datname FROM pg_database WHERE datname like '%cartodb_user%'")
for user_db in $USER_DBS; do
  # Update extensions
  for extension in cartodb cdb_dataservices_client crankshaft odbc_fdw plpgsql plproxy plpython3u postgis postgis_raster postgis_topology postgres_fdw
  do
    docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres $user_db -Atc "CREATE EXTENSION IF NOT EXISTS $extension"
    docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres $user_db -Atc "ALTER EXTENSION $extension UPDATE"
  done
  # Setup cdb_conf
  docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres $user_db -Atc "SELECT CDB_Conf_SetConf('fdws', '{\"aggregation\":{\"server\":{\"extensions\":\"postgis\", \"dbname\":\"dataservices_db\", \"host\":\"dataservices\", \"port\":\"5433\"}, \"users\":{\"public\":{\"user\":\"fdw_user\",\"password\":\"3BYSYd7DsdvaV2yNRhhBHceAXDpMZxQ\"} } } }')"
  docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres $user_db -Atc "SELECT CDB_Conf_SetConf('groups_api', '{\"host\": \"builder\", \"port\": 80, \"timeout\": 10, \"username\": \"extension\", \"password\": \"elephant\"}')"
  docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres $user_db -Atc "SELECT CDB_Conf_SetConf('geocoder_server_config', '{ \"connection_str\": \"host=dataservices port=5433 dbname=dataservices_db user=geocoder_api\"}')"
  docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres $user_db -Atc "SELECT CDB_Conf_SetConf('invalidation_service', '{\"enabled\":false,\"host\":\"\",\"port\":null,\"retries\":5,\"critical\":false,\"timeout\":5,\"trigger_verbose\":true}')"
  # Update fdw config
  docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres $user_db -Atc "alter user MAPPING FOR public SERVER aggregation OPTIONS ( set user 'fdw_user')"
  docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres $user_db -Atc "alter user MAPPING FOR public SERVER aggregation OPTIONS ( set password '3BYSYd7DsdvaV2yNRhhBHceAXDpMZxQ')"
  docker exec ${COMPOSE_PROJECT_NAME}_postgresql_1 psql -U postgres $user_db -Atc "alter server aggregation options (set host 'dataservices', set port '5433', set dbname 'dataservices_db')"
done
## Update varnish invalidation function
docker exec ${COMPOSE_PROJECT_NAME}_builder_1 bundle exec rake cartodb:db:load_varnish_trigger['','','postgresql','']
## Ensure connector providers creation in user databases
docker exec ${COMPOSE_PROJECT_NAME}_builder_1 bundle exec rake cartodb:connectors:create_providers