Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions docker/dev-env/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,7 @@ RUN apt-get update && \
apt-get upgrade -y && \
apt-get install -y --no-install-recommends bash zip unzip wget libtcnative-1\
tzdata tini ca-certificates openssl libapr1 libpq-dev curl gnupg\
vim libarchive-tools postgresql-common libmimalloc2.0

vim libarchive-tools postgresql-common libmimalloc2.0 libarchive-tools


RUN /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y
Expand Down
1 change: 1 addition & 0 deletions dotCMS/src/main/docker/original/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ RUN apt update && \
libmimalloc2.0 \
openssl \
libapr1 \
libarchive-tools \
libpq-dev && \
rm -rf /var/lib/apt/lists/*

Expand Down
233 changes: 233 additions & 0 deletions dotCMS/src/main/docker/original/ROOT/srv/10-import-env.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,233 @@
#!/bin/bash -e


## Drops the contents of the dotCMS database in preparation for a new import (only if requested)
## Uses DB_HOST / DB_NAME / DB_USERNAME set by the main script; PGPASSWORD is
## exported there, so psql does not prompt for a password.
drop_db_tables () {
echo "- DOT_IMPORT_DROP_DB - attempting to drop db schema"
# Recreate the public schema wholesale instead of dropping tables one by one.
psql -h "${DB_HOST}" -d "${DB_NAME}" -U "${DB_USERNAME}" -c "DROP SCHEMA public CASCADE;CREATE SCHEMA public;GRANT ALL ON SCHEMA public TO public;"
}

## This checks active connections to the dotCMS database - we can only proceed if there are no connections
## Exits 1 when any other non-idle session is attached to DB_NAME.
check_active_connections() {
    # -qtAX: quiet, tuples-only, unaligned, no .psqlrc -- yields a bare number.
    # On a query failure we fall back to "0" so a transient psql error does not
    # block startup (the import itself will surface real connectivity problems).
    local active
    active=$(psql -h "$DB_HOST" -d "$DB_NAME" -U "$DB_USERNAME" -qtAX -c \
        "SELECT count(*) FROM pg_stat_activity
         WHERE datname = '$DB_NAME'
         AND pid != pg_backend_pid()
         AND state != 'idle'" 2>/dev/null || echo "0")

    # Guard against empty or non-numeric output before the arithmetic test.
    case "$active" in
        ''|*[!0-9]*) active=0 ;;
    esac

    if [ "$active" -gt 0 ]; then
        # Diagnostics belong on stderr.
        echo "ERROR: Database has $active active connections" >&2
        echo "Cannot import while database is in use" >&2
        echo "This script is designed for initial deployment only" >&2
        echo "Stop all pods before performing refresh" >&2
        exit 1
    fi
}

## Imports the dotcms_db.sql.gz file into the postgres database specified by the DB_BASE_URL environment variable.
## No-op when the backup file is absent/empty, or when the database already
## contains dotCMS data (a populated 'inode' table).
import_postgres () {

    # Nothing to import when the backup file is missing or zero length.
    if [ ! -s "$DB_BACKUP_FILE" ]; then
        return 0
    fi

    # Check if database already has data (inode table exists with records)
    INODE_COUNT=$(psql -h "${DB_HOST}" -d "${DB_NAME}" -U "${DB_USERNAME}" -qtAX -c \
        "SELECT CASE WHEN EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = 'inode')
         THEN (SELECT count(*) FROM inode) ELSE 0 END" 2>/dev/null | tr -d '[:space:]')

    if [ -n "$INODE_COUNT" ] && [ "$INODE_COUNT" -gt 0 ]; then
        echo "- Database already contains data ($INODE_COUNT inodes), skipping import."
        return 0
    fi

    # Stream the compressed dump straight into psql (no useless 'cat').
    gzip -dc "$DB_BACKUP_FILE" | psql -h "${DB_HOST}" -d "${DB_NAME}" -U "${DB_USERNAME}"
}


## Downloads the assets.zip and dotcms_db.sql.gz files from the specified environment.
## Reads DOT_IMPORT_ENVIRONMENT plus either DOT_IMPORT_API_TOKEN or
## DOT_IMPORT_USERNAME_PASSWORD; writes ASSETS_BACKUP_FILE and DB_BACKUP_FILE.
## Exits 1 on a failed download.
download_dotcms_db_assets () {

    # If these are 0 length files, delete them so the size checks below retrigger the download
    if [ ! -s "$ASSETS_BACKUP_FILE" ]; then
        rm -f "$ASSETS_BACKUP_FILE"
    fi

    if [ ! -s "$DB_BACKUP_FILE" ]; then
        rm -f "$DB_BACKUP_FILE"
    fi

    echo "- Pulling Environment from $DOT_IMPORT_ENVIRONMENT"

    # An API token takes precedence over username:password basic auth.
    if [ -n "$DOT_IMPORT_API_TOKEN" ]; then
        echo "- Using Authorization: Bearer"
        export AUTH_HEADER="Authorization: Bearer $DOT_IMPORT_API_TOKEN"
    elif [ -n "$DOT_IMPORT_USERNAME_PASSWORD" ]; then
        echo "- Using Authorization: Basic"
        # printf avoids echo's flag/backslash pitfalls; quote to survive spaces.
        export AUTH_HEADER="Authorization: Basic $(printf '%s' "$DOT_IMPORT_USERNAME_PASSWORD" | base64)"
    fi

    mkdir -p "$SHARED_DATA_DIR/assets"
    chown -R dotcms:dotcms "$SHARED_DATA_DIR" || echo "cannot chown"

    if [ -s "$ASSETS_BACKUP_FILE" ] && [ -s "$DB_BACKUP_FILE" ]; then
        echo "- DB and Assets backups exist, skipping"
        echo "- Delete $ASSETS_BACKUP_FILE and $DB_BACKUP_FILE to force a re-download"
        return
    fi

    if [ ! -s "$ASSETS_BACKUP_FILE" ]; then
        rm -f "${ASSETS_BACKUP_FILE}.tmp"
        echo "- Downloading ASSETS from ${DOT_IMPORT_ENVIRONMENT}"

        # Download to a .tmp file first so a partial download never passes the -s check.
        wget --no-check-certificate --header="$AUTH_HEADER" -t 1 -O "${ASSETS_BACKUP_FILE}.tmp" "${DOT_IMPORT_ENVIRONMENT}/api/v1/maintenance/_downloadAssets?oldAssets=${DOT_IMPORT_ALL_ASSETS}&maxSize=${DOT_IMPORT_MAX_ASSET_SIZE}"
        if [ -s "${ASSETS_BACKUP_FILE}.tmp" ]; then
            mv "${ASSETS_BACKUP_FILE}.tmp" "$ASSETS_BACKUP_FILE"
        else
            rm -f "${ASSETS_BACKUP_FILE}.tmp"
            echo "asset download failed, please check your credentials and try again"
            exit 1
        fi
    fi

    # Use -s (not -f) so an empty leftover file also triggers a re-download,
    # matching the assets check above.
    if [ ! -s "$DB_BACKUP_FILE" ]; then
        echo "- Downloading database"
        rm -f "${DB_BACKUP_FILE}.tmp"
        wget --no-check-certificate --header="$AUTH_HEADER" -t 1 -O "${DB_BACKUP_FILE}.tmp" "${DOT_IMPORT_ENVIRONMENT}/api/v1/maintenance/_downloadDb" || exit 1
        if [ -s "${DB_BACKUP_FILE}.tmp" ]; then
            mv "${DB_BACKUP_FILE}.tmp" "$DB_BACKUP_FILE"
        else
            rm -f "${DB_BACKUP_FILE}.tmp"
            echo "database download failed, please check your credentials and try again"
            exit 1
        fi
    fi

    # Don't leave the credential header lingering in the environment.
    unset AUTH_HEADER

}

## Unpacks the assets.zip file if it exists and has not been unpacked
## Returns 0 on success (or when errors are tolerated), 1 on a hard failure.
unpack_assets(){
    local archive="$ASSETS_BACKUP_FILE"

    # No archive (or an empty one) means there is nothing to extract.
    [ -s "$archive" ] || return 0

    echo "- Extracting assets.zip"

    local extract_lang="${DOT_IMPORT_TAR_LANG:-C.UTF-8}"
    local DOT_IMPORT_IGNORE_ASSET_ERRORS=${DOT_IMPORT_IGNORE_ASSET_ERRORS:-"true"}

    # Success path first: extraction worked, nothing more to do.
    if LANG="$extract_lang" bsdtar -xf "$archive" -C "$SHARED_DATA_DIR"; then
        return 0
    fi

    # Extraction failed: tolerate it when errors are explicitly ignored.
    if [ "${DOT_IMPORT_IGNORE_ASSET_ERRORS}" = "true" ]; then
        echo "WARNING: assets extraction reported errors; continuing due to DOT_IMPORT_IGNORE_ASSET_ERRORS=true"
        return 0
    fi

    return 1
}



#### Script main()

# Defaults for all tunables; each may be overridden by the environment.
export DOT_IMPORT_DROP_DB=${DOT_IMPORT_DROP_DB:-"false"}
export DOT_IMPORT_NON_LIVE_ASSETS=${DOT_IMPORT_NON_LIVE_ASSETS:-"false"}
export DOT_IMPORT_MAX_ASSET_SIZE=${DOT_IMPORT_MAX_ASSET_SIZE:-"100mb"}
export SHARED_DATA_DIR=${SHARED_DATA_DIR:-"/data/shared"}
export IMPORT_DATA_DIR=${IMPORT_DATA_DIR:-"$SHARED_DATA_DIR/import"}
export IMPORT_COMPLETE="$IMPORT_DATA_DIR/import_complete.txt"
# Extract hostname and database name from JDBC URL (jdbc:postgresql://host/dbname)
export DB_HOST="${DB_BASE_URL#jdbc:postgresql://}"   # Remove prefix -> host/dbname
export DB_HOST="${DB_HOST%%/*}"                      # Remove /dbname -> host
export DB_NAME="${DB_BASE_URL##*/}"                  # Remove everything before last / -> dbname
export PGPASSWORD="${DB_PASSWORD}"

# Clear the password from environment on exit
trap "unset PGPASSWORD" EXIT

# Nothing to do unless an import source environment was requested.
if [ -z "$DOT_IMPORT_ENVIRONMENT" ]; then
    exit 0
fi

# Credentials are required. Two separate tests instead of the deprecated
# (and ambiguous) '-a' operator inside a single test.
if [ -z "$DOT_IMPORT_API_TOKEN" ] && [ -z "$DOT_IMPORT_USERNAME_PASSWORD" ]; then
    echo "- Set DOT_IMPORT_ENVIRONMENT, DOT_IMPORT_USERNAME_PASSWORD and/or DOT_IMPORT_API_TOKEN to import from another environment on first run"
    exit 0
fi

# Quoted: an unquoted empty expansion here only worked by accident.
if [ -z "$DB_BASE_URL" ]; then
    echo "DB_BASE_URL environment variable not set, cannot continue without importing database"
    exit 0
fi

mkdir -p "$IMPORT_DATA_DIR"

# Bare hostname of the source environment (strip scheme, path, then port).
export DOT_IMPORT_HOST="${DOT_IMPORT_ENVIRONMENT#http://}"; DOT_IMPORT_HOST="${DOT_IMPORT_HOST#https://}"; DOT_IMPORT_HOST="${DOT_IMPORT_HOST%%/*}"; DOT_IMPORT_HOST="${DOT_IMPORT_HOST%%:*}"

# Exit normally if already cloned
if [ -f "$IMPORT_COMPLETE" ]; then
    echo "dotCMS environment already inited. Delete ${IMPORT_COMPLETE} to import again."
    exit 0
fi

# mkdir is atomic, so it doubles as a cross-process/cross-pod lock.
LOCK_DIR="$IMPORT_DATA_DIR/.lock"
if mkdir "$LOCK_DIR" 2>/dev/null; then
    # We own the lock: release it (and the password) on any exit path.
    trap "rm -rf '$LOCK_DIR' 2>/dev/null; unset PGPASSWORD" EXIT
else
    # Get lock file age in minutes (portable for Linux and macOS)
    if stat -c %Y "$LOCK_DIR" >/dev/null 2>&1; then
        FILE_MTIME=$(stat -c %Y "$LOCK_DIR")   # Linux
    else
        FILE_MTIME=$(stat -f %m "$LOCK_DIR")   # macOS
    fi
    CURRENT_TIME=$(date +%s)
    LOCK_AGE_MINUTES=$(( (CURRENT_TIME - FILE_MTIME) / 60 ))

    # Check if import process file is older than 30 minutes (stale lock)
    if [ "$LOCK_AGE_MINUTES" -ge 30 ]; then
        echo "ERROR: Import process appears stale (lock file is ${LOCK_AGE_MINUTES} minutes old). Removing lock file."
        rm -rf "$LOCK_DIR"
        exit 1
    fi
    echo "ERROR: Lock found: ${LOCK_DIR} (${LOCK_AGE_MINUTES} minutes old)."
    echo " Delete lock file or wait until it's 30 minutes old and try again"
    echo " sleeping for 3m"
    sleep 180
    exit 1
fi

# Sanitized (not hashed) environment URL: every non-alphanumeric run becomes
# a single '_', used to namespace the backup files per source environment.
HASHED_ENV=$(echo -n "$DOT_IMPORT_ENVIRONMENT" | tr -cs 'a-zA-Z0-9' '_')

export ASSETS_BACKUP_FILE="${IMPORT_DATA_DIR}/${HASHED_ENV}_assets.zip"
export DB_BACKUP_FILE="${IMPORT_DATA_DIR}/${HASHED_ENV}_dotcms_db.sql.gz"

# Step 1. download db and assets (if needed)
download_dotcms_db_assets || { echo "Unable to download dotcms backup"; exit 1; }

# Step 2. wipe database clean (if requested)
if [ "$DOT_IMPORT_DROP_DB" = "true" ]; then
    drop_db_tables || { echo "unable to drop the dotcms db schema"; exit 1; }
fi

# Step 3. import postgres db
import_postgres || { echo "Unable to import postgres backup"; exit 1; }

# Step 4. unpack assets.zip
unpack_assets || { echo "Unable to unzip assets"; exit 1; }

# Step 5: exit sig 13 if the clone worked (the caller treats 13 as "done, stop init")
if rm -rf "$LOCK_DIR" && touch "$IMPORT_COMPLETE"; then
    echo "dotCMS Environment $DOT_IMPORT_HOST Imported, happily exiting."
    exit 13
fi

# Otherwise, die ugly
echo "Unable to complete import"
exit 1
58 changes: 46 additions & 12 deletions dotCMS/src/main/docker/original/ROOT/srv/40-custom-starter-zip.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,26 +3,60 @@
set -e


### URL to download the custom starter from
CUSTOM_STARTER_URL=${CUSTOM_STARTER_URL:-""}

CUSTOM_STARTER=custom_starter.zip
### dotCMS API Token
CUSTOM_STARTER_URL_AUTH_TOKEN=${CUSTOM_STARTER_URL_AUTH_TOKEN:-""}

## if we have a custom starter
### Or basic auth - in the form of username:password
CUSTOM_STARTER_URL_BASIC_AUTH=${CUSTOM_STARTER_URL_BASIC_AUTH:-""}

### Folder where the custom starter will be downloaded
CUSTOM_STARTER_DATA_FOLDER=${CUSTOM_STARTER_DATA_FOLDER:-"/data/shared"}


## if we dont have a custom starter
if [ -z ${CUSTOM_STARTER_URL} ]; then
echo "Using default starter";
else
if [[ ! -f /data/shared/$CUSTOM_STARTER ]]; then
touch /data/shared/$CUSTOM_STARTER
echo "Downloading Custom Starter:" $CUSTOM_STARTER_URL
mkdir -p /data/shared
curl -s -L -o /data/shared/$CUSTOM_STARTER $CUSTOM_STARTER_URL
if [[ -s /data/shared/$CUSTOM_STARTER ]] ; then
export DOT_STARTER_DATA_LOAD=/data/shared/$CUSTOM_STARTER
else

HASHED_URL=$(echo -n "$CUSTOM_STARTER_URL" | md5sum | cut -d ' ' -f 1)
CUSTOM_STARTER=dotcms-starter-$HASHED_URL.zip

if [[ ! -f $CUSTOM_STARTER_DATA_FOLDER/$CUSTOM_STARTER ]]; then

echo "CUSTOM_STARTER_DATA_FOLDER: $CUSTOM_STARTER_DATA_FOLDER"
echo "CUSTOM_STARTER_URL: $CUSTOM_STARTER_URL"
echo "HASHED_URL: $HASHED_URL"
echo "CUSTOM_STARTER file: $CUSTOM_STARTER"
if [[ -n $CUSTOM_STARTER_URL_AUTH_TOKEN ]]; then
echo "CUSTOM_STARTER_URL_AUTH_TOKEN: XXXXXX"
fi
if [[ -n $CUSTOM_STARTER_URL_AUTH_TOKEN ]]; then
echo "CUSTOM_STARTER_URL_BASIC_AUTH: XXXXXX:XXXXXX"
fi
mkdir -p $CUSTOM_STARTER_DATA_FOLDER
if [[ -n $CUSTOM_STARTER_URL_AUTH_TOKEN ]]; then
echo "Downloading Custom Starter with Auth Token:" $CUSTOM_STARTER_URL
echo "curl -s -L -o $CUSTOM_STARTER_DATA_FOLDER/$CUSTOM_STARTER -HAuthorization: Bearer $CUSTOM_STARTER_URL_AUTH_TOKEN $CUSTOM_STARTER_URL"
curl -k -s -L -o $CUSTOM_STARTER_DATA_FOLDER/$CUSTOM_STARTER -H"Authorization: Bearer $CUSTOM_STARTER_URL_AUTH_TOKEN" "$CUSTOM_STARTER_URL" || echo "Failed to download starter with auth token"
elif [[ -n $CUSTOM_STARTER_URL_BASIC_AUTH ]]; then
echo "Downloading Custom Starter with Basic Auth:" $CUSTOM_STARTER_URL
curl -k -s -L -o $CUSTOM_STARTER_DATA_FOLDER/$CUSTOM_STARTER -u"$CUSTOM_STARTER_URL_BASIC_AUTH" "$CUSTOM_STARTER_URL" || echo "Failed to download starter with basic auth"
else
echo "Downloading Custom Starter:" $CUSTOM_STARTER_URL
curl -k -s -L -o $CUSTOM_STARTER_DATA_FOLDER/$CUSTOM_STARTER "$CUSTOM_STARTER_URL" || echo "Failed to download starter"
fi

if [[ -s $CUSTOM_STARTER_DATA_FOLDER/$CUSTOM_STARTER ]] ; then
export DOT_STARTER_DATA_LOAD=$CUSTOM_STARTER_DATA_FOLDER/$CUSTOM_STARTER
else
rm /data/shared/$CUSTOM_STARTER
rm -f $CUSTOM_STARTER_DATA_FOLDER/$CUSTOM_STARTER
echo "No starter downloaded, skipping"
fi
else
echo "custom starter already downloaded"
echo "if you need to redownload a new starter, delete the existing custom starter file found here: /data/shared/$CUSTOM_STARTER"
echo "if you need to redownload a new starter, delete the existing custom starter file found here: $CUSTOM_STARTER_DATA_FOLDER/$CUSTOM_STARTER"
fi
fi
5 changes: 3 additions & 2 deletions dotCMS/src/main/docker/original/ROOT/srv/50-load-dump-sql.sh
Original file line number Diff line number Diff line change
@@ -1,16 +1,17 @@
#!/bin/bash
set -e


# Check if the DB_LOAD_DUMP environment variable is set and the file exists
if [[ -n "${DB_LOAD_DUMP_SQL}" && -f "${DB_LOAD_DUMP_SQL}" && -z ${CUSTOM_STARTER_URL} ]]; then
echo "Importing database dump from ${DB_LOAD_DUMP_SQL}..."
sleep 10
export PGPASSWORD=${DB_PASSWORD}
/usr/bin/psql -h "${DB_HOST}" -U "${DB_USERNAME}" -d "${DB_NAME}" -f "${DB_LOAD_DUMP_SQL}"

unset PGPASSWORD
echo "Dump successfully imported."
elif [[ -n ${DOT_STARTER_DATA_LOAD} ]]; then
echo "Importing data from starter ${CUSTOM_STARTER_URL}..."
else
echo "Dump file not found [${DB_LOAD_DUMP_SQL}]"
fi
fi
8 changes: 8 additions & 0 deletions dotCMS/src/main/docker/original/ROOT/srv/entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,14 @@ umask 007

export TOMCAT_HOME=/srv/dotserver/tomcat

/srv/10-import-env.sh
exit_status=$?
# Check the value
if [ $exit_status -eq 13 ]; then
echo "Import completed, init signing off"
exit 0;
fi

source /srv/20-copy-overriden-files.sh
source /srv/25-generate-dev-ssl-cert.sh
source /srv/30-override-config-props.sh
Expand Down
32 changes: 32 additions & 0 deletions dotCMS/src/main/docker/original/ROOT/srv/test-import.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
#!/bin/bash -e

# This script can be used to test the importing functionality of dotCMS.
# Use the environmental variables below to spin up a dotcms docker image
# and clone different environments. If you are running locally, run a
#
# `just build-quicker`
#
# from the project root to quickly build a testable docker image


# Write the container environment to app.env. The quoted 'EOF' delimiter keeps
# the contents literal (no variable expansion). NOTE(review): these are public
# demo-site defaults — replace before pointing at a real environment.
cat > app.env << 'EOF'
DOT_IMPORT_ENVIRONMENT=https://demo.dotcms.com
DOT_IMPORT_API_TOKEN=
[email protected]:admin
DOT_IMPORT_DROP_DB=true


DB_BASE_URL=jdbc:postgresql://db.dotcms.site/dotcms
DB_DRIVER=org.postgresql.Driver
DB_PASSWORD=password
DB_USERNAME=dotcmsdbuser
DOT_ENABLE_SCRIPTING=true
DOT_ES_AUTH_BASIC_PASSWORD=admin
DOT_ES_AUTH_BASIC_USER=admin
DOT_ES_AUTH_TYPE=BASIC
DOT_ES_ENDPOINTS=https://es.dotcms.site:9200

EOF

# Run the snapshot image with the env file, a shared /data volume, and HTTP/
# HTTPS port mappings. NOTE(review): -p8080:8082 maps host 8080 to container
# 8082 — confirm 8082 is the intended container-side HTTP port.
docker run --env-file app.env -v $PWD/data:/data --rm -p8080:8082 -p8443:8443 dotcms/dotcms-test:1.0.0-SNAPSHOT