Skip to content

Commit

Permalink
split db service into an init and an update
Browse files Browse the repository at this point in the history
  • Loading branch information
lbailey-ucsf committed Feb 24, 2025
1 parent ec6b275 commit 0dcb1df
Showing 1 changed file with 14 additions and 55 deletions.
69 changes: 14 additions & 55 deletions .tugboat/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,9 @@ services:
SET GLOBAL wait_timeout=28800;
SET GLOBAL max_allowed_packet=1073741824;
SET GLOBAL net_buffer_length=1048576;
SET GLOBAL innodb_buffer_pool_size=1073741824;"
SET GLOBAL innodb_buffer_pool_size=1073741824;"

- echo "max_allowed_packet=1073741824" >> /etc/mysql/conf.d/tugboat.cnf

- export MYSQL_ROOT_PASSWORD=${MOODLE_DBPASS}
- export MYSQL_DATABASE=${MOODLE_DBNAME}
Expand All @@ -95,70 +97,27 @@ services:
# Install AWS CLI
echo "Installing AWS CLI..."
pip install awscli
#pip3 install awscli
# Configure AWS credentials
echo "Configuring AWS credentials..."
export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY}
export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_KEY}
#export AWS_DEFAULT_REGION=us-west-1
fi
update:
- echo "*** updating database ***"
- |
if [ "${GENERATE_TEST_DATA:-true}" = "false" ]; then
# Download database from S3
echo "Downloading database from S3..."
aws s3 cp ${MOODLE_PREVIEW_DB_DOWNLOAD_URL} /tmp/database.sql.gz
# Verify MySQL settings
echo "Verifying MySQL settings..."
PACKET_SIZE=$(mysql -h db -u ${MOODLE_DBUSER} -p${MOODLE_DBPASS} -N -e "SELECT @@max_allowed_packet;")
echo "Current max_allowed_packet size: $((PACKET_SIZE/1024/1024))MB"
# Create import function with error handling
#######################################
# Import a gzipped SQL dump into MySQL in fixed-size line chunks so a
# failed import can be resumed from a known line instead of restarting.
# Globals:   MOODLE_DBUSER, MOODLE_DBPASS, MOODLE_DBNAME (read)
# Arguments: $1 - path to the gzipped SQL dump
#            $2 - line number to resume from (default 1)
# Outputs:   progress to stdout, errors to stderr and /tmp/import.log
# Returns:   0 on success; 1 on a failed chunk (prints the resume line)
#######################################
import_with_retry() {
  local input_file=$1
  local start_line=${2:-1}
  local chunk_size=50000
  local temp_file="/tmp/import_chunk.sql"
  local plain_file="/tmp/import_full.sql"
  local log_file="/tmp/import.log"
  local total_lines current_line

  # Decompress ONCE up front. The original re-ran `gunzip -c` over the whole
  # archive for every chunk (and once more for the line count), which is
  # quadratic in dump size.
  gunzip -c -- "$input_file" > "$plain_file" || return 1
  total_lines=$(wc -l < "$plain_file")
  current_line=$start_line

  echo "Total lines in dump: $total_lines"
  echo "Starting from line: $current_line"

  while [ "$current_line" -le "$total_lines" ]; do
    echo "Processing chunk starting at line $current_line..."
    # sed ranges are inclusive, so this emits chunk_size+1 lines; the
    # increment below (chunk_size + 1) matches, leaving no gap or overlap.
    sed -n "${current_line},$((current_line + chunk_size))p" "$plain_file" > "$temp_file"
    # NOTE(review): chunking by line count can split a multi-line SQL
    # statement across chunk boundaries; this is only safe if the dump has
    # one statement per line — confirm against the dump format.
    if mysql -h db -u "${MOODLE_DBUSER}" -p"${MOODLE_DBPASS}" "${MOODLE_DBNAME}" < "$temp_file" 2>> "$log_file"; then
      echo "Successfully imported chunk"
    else
      echo "Error importing chunk. See $log_file for details." >&2
      echo "You can retry the import starting from line $current_line" >&2
      rm -f -- "$temp_file" "$plain_file"
      return 1
    fi
    current_line=$((current_line + chunk_size + 1))
  done

  rm -f -- "$temp_file" "$plain_file"
  echo "Import completed successfully"
  return 0
}
# Attempt import
echo "Starting database import..."
#if import_with_retry "/tmp/database.sql.gz"; then
# echo "Database import successful"
#else
# echo "Error during import. Check logs for details."
# echo "Last error from MySQL:"
# tail -n 5 /tmp/import.log
# exit 1
#fi
# Import the database.
echo "Importing database..."
# BUG FIX: plain `gunzip file.gz` decompresses in place and writes NOTHING
# to stdout, so mysql received an empty stream and imported nothing.
# `gunzip -c` streams the decompressed SQL into the pipe.
# NOTE(review): other mysql calls in this script use `-h db`; this one
# relies on the default host — confirm which is intended for this service.
gunzip -c /tmp/database.sql.gz | mysql -u "${MOODLE_DBUSER}" -p"${MOODLE_DBPASS}" "${MOODLE_DBNAME}"
Expand All @@ -172,6 +131,6 @@ services:
rm -rf /tmp/venv
else
echo "Skipping db import. Generating testing data..."
echo "Skipping db import. Will generate testing data instead..."
exit 0
fi

0 comments on commit 0dcb1df

Please sign in to comment.