jq: script fragments for mDIS
#!/usr/bin/env bash
# Set default value for INTERMEDIATE_FILE
DEFAULT_INTERMEDIATE_FILE="../pseudocolumns-per-table.json"
# Check if an argument is provided, otherwise use the default value
INTERMEDIATE_FILE=${1:-$DEFAULT_INTERMEDIATE_FILE}
if [[ ! -f "$1" ]]; then
echo "Filename '$1' does not exist, writing JSON to '$DEFAULT_INTERMEDIATE_FILE'"
fi
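# The filters below assume each model JSON file describes one table, roughly like this
# (hypothetical sketch; only the keys actually used by the jq filters are shown):
# {
#   "table": "surveying_test_series",
#   "columns": [
#     { "name": "borehole_diameter", "pseudoCalc": "some expression" },
#     { "name": "remarks",           "pseudoCalc": "" }
#   ]
# }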
output_file="alter_tables.sql"
result=()
for f in *.json; do
    table=$(jq -r '.table' < "$f")
    echo "$table"
    columns=$(jq -r '.columns[] | select(.pseudoCalc != "") | .name' < "$f" | jq -R . | jq -s .)
    result+=("$(jq -n --arg filename "$f" --arg table "$table" --argjson columns "$columns" '{filename: $filename, table: $table, columns: $columns}')")
done
printf '%s\n' "${result[@]}" | jq -s . > "$INTERMEDIATE_FILE"
if [[ ! -f "$INTERMEDIATE_FILE" ]]; then
echo "Intermediate JSON file $INTERMEDIATE_FILE could not be written."
exit 1
fi
# Generate ALTER TABLE statements and save to a SQL file
jq -r '
.[] |
.table as $table |
.columns[] |
"ALTER TABLE `" + $table + "` ADD COLUMN `" + . + "` POINT;"
' "$INTERMEDIATE_FILE" > $output_file
echo "" >> $output_file
echo "#############################" >> $output_file
echo "For dropping columns:" >> $output_file
echo "#############################" >> $output_file
echo "" >> $output_file
jq -r '
.[] |
.table as $table |
.columns[] |
"ALTER TABLE `" + $table + "` DROP COLUMN `" + . + "`;"
' "$INTERMEDIATE_FILE" >> $output_file
echo "ALTER TABLE statements have been saved to $output_file"
echo ""
echo "# now we can run the SQL file to add the columns to the tables,"
echo "# See alter_database.sh for an example of how to do this."
#!/usr/bin/env bash
# This script demonstrates how to make a copy of a MySQL database dump
# and modify it for import into a new database.
#
ALTER_FILE="alter_tables.sql"
DUMP_FILE=$1
if [[ ! -f "$DUMP_FILE" ]]; then
echo "Input file '$DUMP_FILE' does not exist."
exit 1
fi
if [[ ! -f "$ALTER_FILE" ]]; then
echo "Extra SQL file '$ALTER_FILE' does not exist."
exit 1
fi
# Extract SOURCE_DB from the SQL file name
SOURCE_DB=$(echo "$DUMP_FILE" | grep -oP '(?<=--)[^-]+(?=\.sql)')
# Replace "mdis_" with "work_" to create TARGET_DB
TARGET_DB=$(echo "$SOURCE_DB" | sed 's/^mdis_/work_/')
# Print the extracted SOURCE_DB
echo "SOURCE_DB: $SOURCE_DB"
echo "TARGET_DB: $TARGET_DB"
# Step 1: Make a copy of the mysqldump file and modify it
cp "$DUMP_FILE" "$TARGET_DB.sql"
# Step 2: Replace "USE $SOURCE_DB" with "USE $TARGET_DB" in the new dump file
sed -i "s/USE .$SOURCE_DB.;/USE $TARGET_DB;/" "$TARGET_DB.sql"
# Step 3: (Optional) Inject SQL fragments into the dump file for calculated columns
# You can insert your SQL code directly before the end of the file or at any strategic location
# Example: appending SQL for altering tables at the end of the dump file
cat $ALTER_FILE >> "$TARGET_DB.sql"
# Step 4: Print the command to import the modified dump into the MySQL dev server
echo "mysql -u root -p -h wb31 -P 8508 -D $TARGET_DB < \"$TARGET_DB.sql\""
# Step 5: (Optional) Apply ALTER TABLE statements separately (if not injected in step 3)
# Uncomment if using an external file for the ALTER TABLE statements
# mysql -u root -p -h wb31 -P 8508 -D $TARGET_DB < "$ALTER_FILE"
echo "mysql -u root -p -h wb31 -P 8508 -D $TARGET_DB < \"$ALTER_FILE\""
echo "Modified dump written to '$TARGET_DB.sql'; run the printed mysql commands to import it into $TARGET_DB."
# knb Sept 2024
# find "required" columns in MDIS template
jq '.columns[] | select(.pseudoCalc != "") | .name' < ./SurveyingTestSeries.json
ls *.json | xargs -I {} sh -c 'jq -r ".columns[] | select(.required == true) | \"{} \(.name)\"" < {}'
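# Hypothetical output of the one-liner above, one "<file> <column>" pair per required column:
#   SurveyingTestSeries.json test_type_conducted
#   SurveyingTestSeries.json equipment_used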
# same as a script + for loop:
#!/bin/bash
# filenames passed as arguments
for file in "$@"; do
jq -r ".columns[] | select(.required == true) | \"${file} \(.name)\"" < "$file"
done
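# Usage sketch (script name hypothetical):
#   ./required-columns.sh *.json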
# Generate a list of all fields in a form and their data types (the type is in the validators array, sometimes in the formInput array)
# < $HOME/code/git/dis-gfz-2024/backend/dis_templates/defaults/forms/location.json jq '.fields[] | {(.name): (.validators | map(.type) | join(", "))}'
< location.json jq '.fields[] | {(.name): (.validators | map(.type) | join(", "))}' | grep ":"
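# Hypothetical output after the grep, one line per form field (field names and validator types invented):
#   "latitude": "required, numeric"
#   "description": ""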
# Generate a list of all pseudoCalc fields in a model
#!/usr/bin/env bash
INPUT_FILE=../pseudocolumns-per-table.json
output_file=../alter-table-statements.sql
result=()
for f in *.json; do
    table=$(jq -r '.table' < "$f")
    echo "$table"
    columns=$(jq -r '.columns[] | select(.pseudoCalc != "") | .name' < "$f" | jq -R . | jq -s .)
    result+=("$(jq -n --arg filename "$f" --arg table "$table" --argjson columns "$columns" '{filename: $filename, table: $table, columns: $columns}')")
done
printf '%s\n' "${result[@]}" | jq -s . > "$INPUT_FILE"
if [[ ! -f "$INPUT_FILE" ]]; then
echo "Input file $INPUT_FILE does not exist."
exit 1
fi
# Generate ALTER TABLE statements and save to a SQL file
jq -r '
.[] |
.table as $table |
.columns[] |
"ALTER TABLE `" + $table + "` ADD COLUMN `" + . + "` POINT;"
' "$INPUT_FILE" > $output_file
echo "" >> $output_file
echo "#############################" >> $output_file
echo "For dropping columns:" >> $output_file
echo "#############################" >> $output_file
echo "" >> $output_file
jq -r '
.[] |
.table as $table |
.columns[] |
"ALTER TABLE `" + $table + "` DROP COLUMN `" + . + "`;"
' "$INPUT_FILE" >> $output_file
echo "ALTER TABLE statements have been saved to $output_file"
#jq '[.[] | select(.columns | length > 0)]' < pseudocolumns-per-table.json > pseudocolumns-tables.json
#{
# "filename": "SurveyingTestSeries.json",
# "columns": [
# "borehole_diameter",
# "casing_diameter",
# "test_type_conducted",
# "equipment_used"
# ]
#}
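# A possible follow-up (assuming the intermediate file has the shape shown above): count pseudoCalc columns per model file
#jq 'map({(.filename): (.columns | length)}) | add' < pseudocolumns-per-table.json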