run.config:
engine: ruby
engine.config:
- runtime: ruby-2.4
+ runtime: ruby-2.5
extra_packages:
# basic servers:
# for images:
- ImageMagick
+ - jemalloc
# for videos:
- ffmpeg3
- yarn.lock
extra_steps:
- - envsubst < .env.nanobox > .env
+ - cp .env.nanobox .env
- yarn
fs_watch: true
+
deploy.config:
extra_steps:
- NODE_ENV=production bundle exec rake assets:precompile
transform:
- - "sed 's/LOCAL_HTTPS=.*/LOCAL_HTTPS=true/i' /app/.env.nanobox | envsubst > /app/.env.production"
+ - "envsubst < /app/.env.nanobox > /app/.env.production"
- |-
if [ -z "$LOCAL_DOMAIN" ]
then
before_live:
web.web:
- bundle exec rake db:migrate:setup
+ - |-
+ if [[ "${ES_ENABLED}" != "false" ]]
+ then
+ bin/tootctl search deploy
+ fi
+ - bin/tootctl cache clear
+
web.web:
start:
data.storage:
- public/system
+
web.stream:
start:
nginx: nginx -c /app/nanobox/nginx-stream.conf
writable_dirs:
- tmp
+
worker.sidekiq:
- start: bundle exec sidekiq -c 5 -q default -q mailers -q pull -q push -L /app/log/sidekiq.log
+ start:
+ default: bundle exec sidekiq -c 5 -q default -L /app/log/sidekiq.log
+ mailers: bundle exec sidekiq -c 5 -q mailers -L /app/log/sidekiq.log
+ pull: bundle exec sidekiq -c 5 -q pull -L /app/log/sidekiq.log
+ push: bundle exec sidekiq -c 5 -q push -L /app/log/sidekiq.log
writable_dirs:
- tmp
data.storage:
- public/system
- cron:
- - id: generate_static_gifs
- schedule: '*/15 * * * *'
- command: 'bundle exec rake mastodon:maintenance:add_static_avatars'
-
- - id: update_counter_caches
- schedule: '50 * * * *'
- command: 'bundle exec rake mastodon:maintenance:update_counter_caches'
-
- # runs feeds:clear, media:clear, users:clear, and push:refresh
- - id: do_daily_tasks
- schedule: '00 00 * * *'
- command: 'bundle exec rake mastodon:daily'
-
- - id: clear_silenced_media
- schedule: '10 00 * * *'
- command: 'bundle exec rake mastodon:media:remove_silenced'
-
- - id: clear_remote_media
- schedule: '20 00 * * *'
- command: 'bundle exec rake mastodon:media:remove_remote'
-
- - id: clear_unfollowed_subs
- schedule: '30 00 * * *'
- command: 'bundle exec rake mastodon:push:clear'
-
- - id: send_digest_emails
- schedule: '00 20 * * *'
- command: 'bundle exec rake mastodon:emails:digest'
-
- # The following two tasks can be uncommented to automatically open and close
- # registrations on a schedule. The format of 'schedule' is a standard cron
- # time expression: minute hour day month day-of-week; search for "cron
- # time expressions" for more info on how to set these up. The examples here
- # open registration only from 8 am to 4 pm, server time.
- #
- # - id: open_registrations
- # schedule: '00 08 * * *'
- # command: 'bundle exec rake mastodon:settings:open_registrations'
- #
- # - id: close_registrations
- # schedule: '00 16 * * *'
- # command: 'bundle exec rake mastodon:settings:close_registrations'
data.db:
- image: nanobox/postgresql:9.5
+ image: nanobox/postgresql:9.6
cron:
- id: backup
schedule: '0 3 * * *'
command: |
- PGPASSWORD=${DATA_POSTGRES_PASS} pg_dump -U ${DATA_POSTGRES_USER} -w -Fc -O gonano |
+ PGPASSWORD=${DATA_DB_PASS} pg_dump -U ${DATA_DB_USER} -w -Fc -O gonano |
gzip |
- curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz --data-binary @- &&
+ curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz -X POST -T - >&2
curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
- json_pp |
+ sed 's/,/\n/g' |
grep ${HOSTNAME} |
sort |
head -n-${BACKUP_COUNT:-1} |
- sed 's/.*: "\(.*\)".*/\1/' |
+ sed 's/.*: \?"\(.*\)".*/\1/' |
while read file
do
curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
done
+
+data.elastic:
+ image: nanobox/elasticsearch:5
+
+ cron:
+ - id: backup
+ schedule: '0 3 * * *'
+ command: |
+ id=$(cat /proc/sys/kernel/random/uuid)
+ curl -X PUT -H "Content-Type: application/json" "127.0.0.1:9200/_snapshot/${id}" -d "{\"type\": \"fs\",\"settings\": {\"location\": \"/var/tmp/${id}\",\"compress\": true}}"
+ curl -X PUT -H "Content-Type: application/json" "127.0.0.1:9200/_snapshot/${id}/backup?wait_for_completion=true&pretty"
+ tar -cz -C "/var/tmp/${id}" . |
+ curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz -X POST -T - >&2
+ curl -X DELETE -H "Content-Type: application/json" "127.0.0.1:9200/_snapshot/${id}"
+ rm -rf "/var/tmp/${id}"
+ curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
+ sed 's/,/\n/g' |
+ grep ${HOSTNAME} |
+ sort |
+ head -n-${BACKUP_COUNT:-1} |
+ sed 's/.*: \?"\(.*\)".*/\1/' |
+ while read file
+ do
+ curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
+ done
+
+
data.redis:
- image: nanobox/redis:3.0
+ image: nanobox/redis:4.0
cron:
- id: backup
schedule: '0 3 * * *'
command: |
- curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb --data-binary @/data/var/db/redis/dump.rdb &&
+ curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb -X POST -T /data/var/db/redis/dump.rdb >&2
curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
- json_pp |
+ sed 's/,/\n/g' |
grep ${HOSTNAME} |
sort |
head -n-${BACKUP_COUNT:-1} |
- sed 's/.*: "\(.*\)".*/\1/' |
+ sed 's/.*: \?"\(.*\)".*/\1/' |
while read file
do
curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
done
+
data.storage:
  image: nanobox/unfs:0.9
  cron:
  - id: backup
schedule: '0 3 * * *'
command: |
- tar cz -C /data/var/db/unfs/ |
- curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz --data-binary @- &&
+ tar cz -C /data/var/db/unfs/ . |
+ curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz -X POST -T - >&2
curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
- json_pp |
+ sed 's/,/\n/g' |
grep ${HOSTNAME} |
sort |
head -n-${BACKUP_COUNT:-1} |
- sed 's/.*: "\(.*\)".*/\1/' |
+ sed 's/.*: \?"\(.*\)".*/\1/' |
while read file
do
curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE