Compare commits
1 commit: main...backup-res (b7ef436f3c)
Gemfile (7 changes)
@@ -262,7 +262,6 @@ if ENV["IMPORT"] == "1"
 end

 gem "web-push"
-gem "colored2", require: false
 gem "maxminddb"

 gem "rails_failover", require: false
@@ -279,3 +278,9 @@ gem "webrick", require: false

 # Workaround until Ruby ships with cgi version 0.3.6 or higher.
 gem "cgi", ">= 0.3.6", require: false
+
+# used by backup and restore
+gem "colored2", require: false
+gem "mini_tarball", require: false
+gem "ruby-progressbar", require: false
+gem "tty-spinner", require: false
Gemfile.lock (10 changes)
@@ -5,7 +5,6 @@ GIT
     mail (2.8.0.edge)
       mini_mime (>= 0.1.1)
-

 GEM
   remote: https://rubygems.org/
   specs:
@@ -218,6 +217,7 @@ GEM
     mini_sql (1.4.0)
     mini_suffix (0.3.3)
       ffi (~> 1.9)
+    mini_tarball (0.3.1)
     minitest (5.17.0)
     mocha (2.0.2)
       ruby2_keywords (>= 0.0.5)
@@ -459,6 +459,9 @@ GEM
     thor (1.2.1)
     tilt (2.0.11)
     timeout (0.3.1)
+    tty-cursor (0.7.1)
+    tty-spinner (0.9.3)
+      tty-cursor (~> 0.7)
     tzinfo (2.0.5)
       concurrent-ruby (~> 1.0)
     uglifier (4.2.0)
@@ -569,6 +572,7 @@ DEPENDENCIES
  mini_scheduler
  mini_sql
  mini_suffix
+ mini_tarball
  minitest
  mocha
  multi_json
@@ -613,6 +617,7 @@ DEPENDENCIES
  rswag-specs
  rubocop-discourse
  ruby-prof
+ ruby-progressbar
  ruby-readability
  rubyzip
  sanitize
@@ -630,6 +635,7 @@ DEPENDENCIES
  syntax_tree-disable_ternary
  test-prof
  thor
+ tty-spinner
  uglifier
  unf
  unicorn
@@ -641,4 +647,4 @@ DEPENDENCIES
  yaml-lint

 BUNDLED WITH
-   2.4.1
+   2.4.3
@@ -39,7 +39,12 @@ class Admin::BackupsController < Admin::AdminController
       with_uploads: params.fetch(:with_uploads) == "true",
       client_id: params[:client_id],
     }
-    BackupRestore.backup!(current_user.id, opts)
+
+    if SiteSetting.experimental_backup_version
+      BackupRestoreV2.backup!(current_user.id, opts)
+    else
+      BackupRestore.backup!(current_user.id, opts)
+    end
   rescue BackupRestore::OperationRunningError
     render_error("backup.operation_already_running")
   else
@@ -15,6 +15,10 @@ class BackupLocationSiteSetting < EnumSiteSetting
     ]
   end

+  def self.find_by_value(val)
+    values.find { |v| v[:value] == val }
+  end
+
   def self.translate_names?
     true
   end
@@ -3,6 +3,13 @@
 module HasUrl
   extend ActiveSupport::Concern

+  REMOTE_URL_SQL = "url LIKE '//%' OR url LIKE 'https://%' OR url LIKE 'http://%'"
+
+  included do
+    scope :remote, -> { where(REMOTE_URL_SQL) }
+    scope :local, -> { where.not(REMOTE_URL_SQL) }
+  end
+
   class_methods do
     def extract_url(url)
       url.match(self::URL_REGEX)
@@ -8,6 +8,8 @@ class OptimizedImage < ActiveRecord::Base
   VERSION = 2
   URL_REGEX ||= %r{(/optimized/\dX[/\.\w]*/([a-zA-Z0-9]+)[\.\w]*)}

+  scope :by_users, -> { where("upload_id > ?", Upload::SEEDED_ID_THRESHOLD) }
+
   def self.lock(upload_id, width, height)
     @hostname ||= Discourse.os_hostname
     # note, the extra lock here ensures we only optimize one image per machine on webs
@@ -2158,6 +2158,9 @@ backups:
     default: false
     hidden: true
     client: true
+  experimental_backup_version:
+    default: false
+    hidden: true

 search:
   use_pg_headlines_for_excerpt:
@@ -49,6 +49,7 @@ module BackupRestore
     ActiveRecord::Base.connection.drop_schema(BACKUP_SCHEMA) if backup_schema_dropable?
   end

+  # TODO Reuse from BackupRestoreV2::Database
   def self.core_migration_files
     Dir[Rails.root.join(Migration::SafeMigrate.post_migration_path, "**/*.rb")] +
       Dir[Rails.root.join("db/migrate/*.rb")]
lib/backup_restore_v2.rb (new file, 32 additions)
@@ -0,0 +1,32 @@
# frozen_string_literal: true

module BackupRestoreV2
  FILE_FORMAT = 2
  DUMP_FILE = "dump.sql.gz"
  UPLOADS_FILE = "uploads.tar.gz"
  OPTIMIZED_IMAGES_FILE = "optimized-images.tar.gz"
  METADATA_FILE = "meta.json"
  LOGS_CHANNEL = "/admin/backups/logs"

  def self.backup!(user_id, opts = {})
    if opts[:fork] == false
      logger =
        if opts[:cli] == true
          BackupRestoreV2::Logger::CliLogger.new("backup")
        else
          BackupRestoreV2::Logger::DefaultLogger.new(user_id, opts[:client_id], "backup")
        end
      BackupRestoreV2::Backuper.new(user_id, logger).run
    else
      spawn_process("backup_v2", user_id, opts)
    end
  end

  private_class_method def self.spawn_process(type, user_id, opts)
    script = File.join(Rails.root, "script", "spawn_backup_restore.rb")
    command = ["bundle", "exec", "ruby", script, type, user_id, opts.to_json].map(&:to_s)

    pid = spawn({ "RAILS_DB" => RailsMultisite::ConnectionManagement.current_db }, *command)
    Process.detach(pid)
  end
end
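For orientation, a minimal usage sketch (not part of the change set): invoking the entry point above in-process from a Rails console. The option keys are the ones the code reads; the values here are hypothetical.

    # Run a v2 backup in the current process with CLI-style logging.
    opts = { fork: false, cli: true }
    BackupRestoreV2.backup!(Discourse::SYSTEM_USER_ID, opts)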
lib/backup_restore_v2/backup/database_dumper.rb (new file, 79 additions)
@@ -0,0 +1,79 @@
# frozen_string_literal: true

module BackupRestoreV2
  module Backup
    DatabaseBackupError = Class.new(RuntimeError)

    class DatabaseDumper
      attr_reader :log_lines

      def initialize(schema: BackupRestoreV2::Database::MAIN_SCHEMA, verbose: false)
        @schema = schema
        @verbose = verbose
        @log_lines = []
      end

      def dump_schema_into(output_stream)
        Open3.popen3(*pg_dump_command) do |_, stdout, stderr, thread|
          thread.name = "pg_dump"
          [thread, output_thread(stdout, output_stream), logger_thread(stderr)].each(&:join)
        end

        last_line = @log_lines.last
        if Process.last_status&.exitstatus != 0 || last_line&.delete_prefix!("pg_dump: error: ")
          raise DatabaseBackupError.new("pg_dump failed: #{last_line}")
        end
      end

      private

      def pg_dump_command
        db_conf = BackupRestoreV2::Database.database_configuration
        env = db_conf.password.present? ? { "PGPASSWORD" => db_conf.password } : {}

        host_argument = "--host=#{db_conf.host}" if db_conf.host.present?
        port_argument = "--port=#{db_conf.port}" if db_conf.port.present?
        username_argument = "--username=#{db_conf.username}" if db_conf.username.present?
        verbose = "--verbose" if @verbose

        [
          env, # pass the password to pg_dump (if any)
          "pg_dump", # the pg_dump command
          "--schema=#{@schema}", # only public schema
          "--exclude-table=#{@schema}.pg_*", # exclude tables and views whose name starts with "pg_"
          "--no-owner", # do not output commands to set ownership of objects
          "--no-privileges", # prevent dumping of access privileges
          "--compress=4", # Compression level of 4
          verbose, # specifies verbose mode (if enabled)
          host_argument, # the hostname to connect to (if any)
          port_argument, # the port to connect to (if any)
          username_argument, # the username to connect as (if any)
          db_conf.database, # the name of the database to dump
        ].compact
      end

      def output_thread(stdout, dump_output_stream)
        Thread.new do
          Thread.current.name = "pg_dump_copier"
          Thread.current.report_on_exception = false

          IO.copy_stream(stdout, dump_output_stream)
        end
      end

      def logger_thread(stderr)
        Thread.new do
          Thread.current.name = "pg_dump_logger"
          Thread.current.report_on_exception = false

          while (line = stderr.readline)
            line.chomp!
            @log_lines << line
          end
        rescue EOFError
          # finished reading...
        end
      end
    end
  end
end
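For context, a short sketch of assumed usage (not from the diff): since pg_dump is invoked with --compress, the bytes copied from its stdout are already gzip data, so the dump can be streamed straight to disk without buffering.

    # Write the compressed dump directly to a file.
    File.open("/tmp/dump.sql.gz", "wb") do |file|
      BackupRestoreV2::Backup::DatabaseDumper.new.dump_schema_into(file)
    end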
lib/backup_restore_v2/backup/metadata_writer.rb (new file, 64 additions)
@@ -0,0 +1,64 @@
# frozen_string_literal: true

require "json"

module BackupRestoreV2
  module Backup
    class MetadataWriter
      attr_accessor :upload_stats, :optimized_image_stats

      def initialize(uploads_stats = nil, optimized_images_stats = nil)
        @upload_stats = uploads_stats
        @optimized_image_stats = optimized_images_stats
      end

      def write_into(output_stream)
        output_stream.write(json)
      end

      def estimated_file_size
        # adding an additional KB to account for unknown upload stats
        json.bytesize + 1.kilobyte
      end

      private

      def json
        @cached_data ||= {
          backup_format: FILE_FORMAT,
          discourse_version: Discourse::VERSION::STRING,
          db_version: Database.current_core_migration_version,
          git_version: Discourse.git_version,
          git_branch: Discourse.git_branch,
          base_url: Discourse.base_url,
          cdn_url: Discourse.asset_host,
          s3_base_url: SiteSetting.Upload.enable_s3_uploads ? SiteSetting.Upload.s3_base_url : nil,
          s3_cdn_url: SiteSetting.Upload.enable_s3_uploads ? SiteSetting.Upload.s3_cdn_url : nil,
          db_name: RailsMultisite::ConnectionManagement.current_db,
          multisite: Rails.configuration.multisite,
          plugins: plugin_list,
        }

        data =
          @cached_data.merge({ uploads: @upload_stats, optimized_images: @optimized_image_stats })

        JSON.pretty_generate(data)
      end

      def plugin_list
        plugins = []

        Discourse.visible_plugins.each do |plugin|
          plugins << {
            name: plugin.name,
            enabled: plugin.enabled?,
            db_version: Database.current_plugin_migration_version(plugin),
            git_version: plugin.git_version,
          }
        end

        plugins.sort_by { |p| p[:name] }
      end
    end
  end
end
lib/backup_restore_v2/backup/upload_backuper.rb (new file, 130 additions)
@@ -0,0 +1,130 @@
# frozen_string_literal: true

require "mini_tarball"

module BackupRestoreV2
  module Backup
    class UploadBackuper
      def self.include_uploads?
        Upload.exists?(Upload.by_users.local) ||
          (SiteSetting.include_s3_uploads_in_backups && Upload.exists?(Upload.by_users.remote))
      end

      def self.include_optimized_images?
        # never include optimized images stored on S3
        SiteSetting.include_thumbnails_in_backups &&
          OptimizedImage.exists?(OptimizedImage.by_users.local)
      end

      def initialize(tmp_directory, progress_logger)
        @tmp_directory = tmp_directory
        @progress_logger = progress_logger
      end

      def compress_uploads_into(output_stream)
        @stats = create_stats(Upload.by_users.count)
        @progress_logger.start(@stats.total_count)

        with_gzip(output_stream) { |tar_writer| add_original_files(tar_writer) }

        @stats
      end

      def compress_optimized_images_into(output_stream)
        @stats = create_stats(OptimizedImage.by_users.count)
        @progress_logger.start(@stats.total_count)

        with_gzip(output_stream) { |tar_writer| add_optimized_files(tar_writer) }

        @stats
      end

      private

      def with_gzip(output_stream)
        uploads_gz =
          Zlib::GzipWriter.new(output_stream, SiteSetting.backup_gzip_compression_level_for_uploads)
        MiniTarball::Writer.use(uploads_gz) { |uploads_tar| yield(uploads_tar) }
      end

      def add_original_files(tar_writer)
        Upload.by_users.find_each do |upload|
          paths_of_upload(upload) do |relative_path, absolute_path|
            if absolute_path.present?
              if File.exist?(absolute_path)
                tar_writer.add_file(name: relative_path, source_file_path: absolute_path)
                @stats.included_count += 1
              else
                @stats.missing_count += 1
                @progress_logger.log("Failed to locate file for upload with ID #{upload.id}")
              end
            end
          end

          @progress_logger.increment
        end
      end

      def add_optimized_files(tar_writer)
        OptimizedImage.by_users.local.find_each do |optimized_image|
          relative_path = base_store.get_path_for_optimized_image(optimized_image)
          absolute_path = File.join(upload_path_prefix, relative_path)

          if File.exist?(absolute_path)
            tar_writer.add_file(name: relative_path, source_file_path: absolute_path)
            @stats.included_count += 1
          else
            @stats.missing_count += 1
            @progress_logger.log(
              "Failed to locate file for optimized image with ID #{optimized_image.id}",
            )
          end

          @progress_logger.increment
        end
      end

      def paths_of_upload(upload)
        is_local_upload = upload.local?
        relative_path = base_store.get_path_for_upload(upload)

        if is_local_upload
          absolute_path = File.join(upload_path_prefix, relative_path)
        else
          absolute_path = File.join(@tmp_directory, upload.sha1)

          begin
            s3_store.download_file(upload, absolute_path)
          rescue => ex
            absolute_path = nil
            @stats.missing_count += 1
            @progress_logger.log(
              "Failed to download file from S3 for upload with ID #{upload.id}",
              ex,
            )
          end
        end

        yield(relative_path, absolute_path)

        FileUtils.rm_f(absolute_path) if !is_local_upload && absolute_path
      end

      def base_store
        @base_store ||= FileStore::BaseStore.new
      end

      def s3_store
        @s3_store ||= FileStore::S3Store.new
      end

      def upload_path_prefix
        @upload_path_prefix ||= File.join(Rails.root, "public", base_store.upload_path)
      end

      def create_stats(total)
        BackupRestoreV2::Backup::UploadStats.new(total_count: total)
      end
    end
  end
end
lib/backup_restore_v2/backup/upload_stats.rb (new file, 27 additions)
@@ -0,0 +1,27 @@
# frozen_string_literal: true

module BackupRestoreV2
  module Backup
    class UploadStats
      attr_accessor :total_count, :included_count, :missing_count

      def initialize(total_count: 0, included_count: 0, missing_count: 0)
        @total_count = total_count
        @included_count = included_count
        @missing_count = missing_count
      end

      def as_json(options = {})
        {
          total_count: @total_count,
          included_count: @included_count,
          missing_count: @missing_count,
        }
      end

      def to_json(*options)
        as_json(*options).to_json(*options)
      end
    end
  end
end
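As a small illustration (hypothetical values), the stats serialize to the JSON that ends up embedded in meta.json:

    stats = BackupRestoreV2::Backup::UploadStats.new(total_count: 10, included_count: 9, missing_count: 1)
    stats.to_json
    # => "{\"total_count\":10,\"included_count\":9,\"missing_count\":1}"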
lib/backup_restore_v2/backuper.rb (new file, 289 additions)
@@ -0,0 +1,289 @@
# frozen_string_literal: true

require "etc"
require "mini_tarball"

module BackupRestoreV2
  class Backuper
    delegate :log, :log_event, :log_step, :log_warning, :log_error, to: :@logger, private: true
    attr_reader :success

    # @param [Hash] opts
    # @option opts [String] :backup_path_override
    # @option opts [String] :ticket
    def initialize(user_id, logger, opts = {})
      @user = User.find_by(id: user_id) || Discourse.system_user
      @logger = logger
      @opts = opts
    end

    def run
      log_event "[STARTED]"
      log "User '#{@user.username}' started backup"

      initialize_backup
      create_backup
      upload_backup
      finalize_backup

      @success = true
      @backup_path
    rescue SystemExit, SignalException
      log_warning "Backup operation was canceled!"
    rescue BackupRestoreV2::OperationRunningError
      log_error "Operation is already running"
    ensure
      clean_up
      notify_user
      complete
    end

    private

    def initialize_backup
      log_step("Initializing backup") do
        @success = false
        @store = BackupRestore::BackupStore.create

        BackupRestoreV2::Operation.start

        timestamp = Time.now.utc.strftime("%Y-%m-%dT%H%M%SZ")
        current_db = RailsMultisite::ConnectionManagement.current_db
        archive_directory_override, filename_override = calculate_path_overrides
        archive_directory =
          archive_directory_override ||
            BackupRestore::LocalBackupStore.base_directory(db: current_db)

        filename =
          filename_override ||
            begin
              parameterized_title = SiteSetting.title.parameterize.presence || "discourse"
              "#{parameterized_title}-#{timestamp}"
            end

        @backup_filename = "#{filename}.tar"
        @backup_path = File.join(archive_directory, @backup_filename)
        @tmp_directory = File.join(Rails.root, "tmp", "backups", current_db, timestamp)

        FileUtils.mkdir_p(archive_directory)
        FileUtils.mkdir_p(@tmp_directory)
      end
    end

    def create_backup
      metadata_writer = BackupRestoreV2::Backup::MetadataWriter.new

      MiniTarball::Writer.create(@backup_path) do |tar_writer|
        metadata_placeholder = add_metadata_placeholder(tar_writer, metadata_writer)
        add_db_dump(tar_writer)
        metadata_writer.upload_stats = add_uploads(tar_writer)
        metadata_writer.optimized_image_stats = add_optimized_images(tar_writer)
        add_metadata(tar_writer, metadata_writer, metadata_placeholder)
      end
    end

    # Adds an empty file to the backup archive which acts as a placeholder for the `meta.json` file.
    # This file needs to be the first file in the backup archive in order to allow reading the backup's
    # metadata without downloading the whole file. The file size is estimated because some of the data
    # is still unknown at this time.
    # @param [MiniTarball::Writer] tar_writer
    # @param [BackupRestoreV2::Backup::MetadataWriter] metadata_writer
    # @return [Integer] index of the placeholder
    def add_metadata_placeholder(tar_writer, metadata_writer)
      tar_writer.add_file_placeholder(
        name: BackupRestoreV2::METADATA_FILE,
        file_size: metadata_writer.estimated_file_size,
      )
    end

    # Streams the database dump directly into the backup archive.
    # @param [MiniTarball::Writer] tar_writer
    def add_db_dump(tar_writer)
      log_step("Creating database dump") do
        tar_writer.add_file_from_stream(
          name: BackupRestoreV2::DUMP_FILE,
          **tar_file_attributes,
        ) do |output_stream|
          dumper = Backup::DatabaseDumper.new
          dumper.dump_schema_into(output_stream)
        end
      end
    end

    # Streams uploaded files directly into the backup archive.
    # @param [MiniTarball::Writer] tar_writer
    def add_uploads(tar_writer)
      if skip_uploads? || !Backup::UploadBackuper.include_uploads?
        log "Skipping uploads"
        return
      end

      stats = nil

      log_step("Adding uploads", with_progress: true) do |progress_logger|
        tar_writer.add_file_from_stream(
          name: BackupRestoreV2::UPLOADS_FILE,
          **tar_file_attributes,
        ) do |output_stream|
          backuper = Backup::UploadBackuper.new(@tmp_directory, progress_logger)
          stats = backuper.compress_uploads_into(output_stream)
        end
      end

      if stats && stats.missing_count > 0
        log_warning "Failed to add #{stats.missing_count} uploads. See logfile for details."
      end

      stats
    end

    # Streams optimized images directly into the backup archive.
    # @param [MiniTarball::Writer] tar_writer
    def add_optimized_images(tar_writer)
      if skip_uploads? || !Backup::UploadBackuper.include_optimized_images?
        log "Skipping optimized images"
        return
      end

      stats = nil

      log_step("Adding optimized images", with_progress: true) do |progress_logger|
        tar_writer.add_file_from_stream(
          name: BackupRestoreV2::OPTIMIZED_IMAGES_FILE,
          **tar_file_attributes,
        ) do |output_stream|
          backuper = Backup::UploadBackuper.new(@tmp_directory, progress_logger)
          stats = backuper.compress_optimized_images_into(output_stream)
        end
      end

      if stats && stats.missing_count > 0
        log_warning "Failed to add #{stats.missing_count} optimized images. See logfile for details."
      end

      stats
    end

    # Overwrites the `meta.json` file at the beginning of the backup archive.
    # @param [MiniTarball::Writer] tar_writer
    # @param [BackupRestoreV2::Backup::MetadataWriter] metadata_writer
    # @param [Integer] placeholder index of the placeholder
    def add_metadata(tar_writer, metadata_writer, placeholder)
      log_step("Adding metadata file") do
        tar_writer.with_placeholder(placeholder) do |writer|
          writer.add_file_from_stream(
            name: BackupRestoreV2::METADATA_FILE,
            **tar_file_attributes,
          ) { |output_stream| metadata_writer.write_into(output_stream) }
        end
      end
    end

    def upload_backup
      return unless @store.remote?

      file_size = File.size(@backup_path)
      file_size =
        Object.new.extend(ActionView::Helpers::NumberHelper).number_to_human_size(file_size)

      log_step("Uploading backup (#{file_size})") do
        @store.upload_file(@backup_filename, @backup_path, "application/x-tar")
      end
    end

    def finalize_backup
      log_step("Finalizing backup") { DiscourseEvent.trigger(:backup_created) }
    end

    def clean_up
      log_step("Cleaning up") do
        # delete backup if there was an error or the file was uploaded to a remote store
        if @backup_path && File.exist?(@backup_path) && (!@success || @store.remote?)
          File.delete(@backup_path)
        end

        # delete the temp directory
        FileUtils.rm_rf(@tmp_directory) if @tmp_directory && Dir.exist?(@tmp_directory)

        if Rails.env.development?
          @store&.reset_cache
        else
          @store&.delete_old
        end
      end
    end

    def notify_user
      return if @success && @user.id == Discourse::SYSTEM_USER_ID

      log_step("Notifying user") do
        status = @success ? :backup_succeeded : :backup_failed
        logs = Discourse::Utils.logs_markdown(@logger.logs, user: @user)
        post = SystemMessage.create_from_system_user(@user, status, logs: logs)

        post.topic.invite_group(@user, Group[:admins]) if @user.id == Discourse::SYSTEM_USER_ID
      end
    end

    def complete
      begin
        BackupRestoreV2::Operation.finish
      rescue => e
        log_error "Failed to mark operation as finished", e
      end

      if @success
        if @store.remote?
          location = BackupLocationSiteSetting.find_by_value(SiteSetting.backup_location)
          location = I18n.t("admin_js.#{location[:name]}") if location
          log "Backup stored on #{location} as #{@backup_filename}"
        else
          log "Backup stored at: #{@backup_path}"
        end

        if @logger.warnings?
          log_warning "Backup completed with warnings!"
        else
          log "Backup completed successfully!"
        end

        log_event "[SUCCESS]"
        DiscourseEvent.trigger(:backup_complete, logs: @logger.logs, ticket: @opts[:ticket])
      else
        log_error "Backup failed!"
        log_event "[FAILED]"
        DiscourseEvent.trigger(:backup_failed, logs: @logger.logs, ticket: @opts[:ticket])
      end
    end

    def tar_file_attributes
      @tar_file_attributes ||= {
        uid: Process.uid,
        gid: Process.gid,
        uname: Etc.getpwuid(Process.uid).name,
        gname: Etc.getgrgid(Process.gid).name,
      }
    end

    def calculate_path_overrides
      backup_path_override = @opts[:backup_path_override]

      if @opts[:backup_path_override].present?
        archive_directory_override = File.dirname(backup_path_override).sub(/^\.$/, "")

        if archive_directory_override.present? && @store.remote?
          log_warning "Only local backup storage supports overriding backup path."
          archive_directory_override = nil
        end

        filename_override =
          File.basename(backup_path_override).sub(/\.(sql\.gz|tar|tar\.gz|tgz)$/i, "")
        [archive_directory_override, filename_override]
      end
    end

    def skip_uploads?
      !@opts.fetch(:with_uploads, true)
    end
  end
end
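The placeholder trick above is what makes the metadata cheap to read later: because meta.json is the first entry, a client only needs the head of the archive. A sketch of that read (not from the diff), relying only on the standard POSIX tar header layout (entry name at offset 0, octal size at offset 124); the file name is hypothetical:

    require "json"

    File.open("discourse-backup.tar", "rb") do |f|
      header = f.read(512)               # first tar header block
      name = header[0, 100].delete("\0") # NUL-padded entry name
      size = header[124, 12].to_i(8)     # entry size, octal-encoded
      metadata = JSON.parse(f.read(size)) if name == "meta.json"
    end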
lib/backup_restore_v2/database.rb (new file, 66 additions)
@@ -0,0 +1,66 @@
# frozen_string_literal: true

module BackupRestoreV2
  DatabaseConfiguration = Struct.new(:host, :port, :username, :password, :database)

  module Database
    MAIN_SCHEMA = "public"

    def self.database_configuration
      config = ActiveRecord::Base.connection_pool.db_config.configuration_hash
      config = config.with_indifferent_access

      # credentials for PostgreSQL in CI environment
      if Rails.env.test?
        username = ENV["PGUSER"]
        password = ENV["PGPASSWORD"]
      end

      DatabaseConfiguration.new(
        config["backup_host"] || config["host"],
        config["backup_port"] || config["port"],
        config["username"] || username || ENV["USER"] || "postgres",
        config["password"] || password,
        config["database"],
      )
    end

    def self.core_migration_files
      files = Dir[Rails.root.join(Discourse::DB_POST_MIGRATE_PATH, "*.rb")]

      ActiveRecord::Migrator.migrations_paths.each do |path|
        files.concat(Dir[Rails.root.join(path, "*.rb")])
      end

      files
    end

    def self.current_core_migration_version
      current_migration_version(core_migration_files)
    end

    def self.current_plugin_migration_version(plugin)
      current_migration_version(plugin_migration_files(plugin))
    end

    private_class_method def self.plugin_migration_files(plugin)
      plugin_root = plugin.directory
      files = Dir[File.join(plugin_root, "/db/migrate/*.rb")]
      files.concat(Dir[File.join(plugin_root, Discourse::DB_POST_MIGRATE_PATH, "*.rb")])
      files
    end

    private_class_method def self.current_migration_version(migration_files)
      return 0 if !ActiveRecord::SchemaMigration.table_exists?

      migration_versions =
        migration_files.map do |path|
          filename = File.basename(path)
          filename[/^\d+/]&.to_i || 0
        end

      db_versions = ActiveRecord::SchemaMigration.all_versions.map(&:to_i)
      migration_versions.intersection(db_versions).max || 0
    end
  end
end
lib/backup_restore_v2/logger/base_logger.rb (new file, 77 additions)
@@ -0,0 +1,77 @@
# frozen_string_literal: true

module BackupRestoreV2
  module Logger
    INFO = :info
    WARNING = :warning
    ERROR = :error

    class BaseLogger
      attr_reader :logs

      def initialize
        @logs = []
        @warning_count = 0
        @error_count = 0
      end

      def log_event(event)
      end

      def log_step(message, with_progress: false)
        log(message)

        if with_progress
          yield(BaseProgressLogger.new)
        else
          yield
        end
      end

      def log(message, level: Logger::INFO)
        raise NotImplementedError
      end

      def log_warning(message, ex = nil)
        log_with_exception(message, ex, Logger::WARNING)
        @warning_count += 1
      end

      def log_error(message, ex = nil)
        log_with_exception(message, ex, Logger::ERROR)
        @error_count += 1
      end

      def warnings?
        @warning_count > 0
      end

      def errors?
        @error_count > 0
      end

      private

      def create_timestamp
        Time.now.utc.strftime("%Y-%m-%d %H:%M:%S")
      end

      def log_with_exception(message, ex, level)
        log(message, level: level)
        log(format_exception(ex), level: level) if ex
      end

      def format_exception(ex)
        <<~MSG
          EXCEPTION: #{ex.message}
          Backtrace:
          \t#{format_backtrace(ex)}
        MSG
      end

      def format_backtrace(ex)
        ex.backtrace.join("\n\t")
      end
    end
  end
end
lib/backup_restore_v2/logger/base_progress_logger.rb (new file, 14 additions)
@@ -0,0 +1,14 @@
# frozen_string_literal: true

module BackupRestoreV2
  module Logger
    class BaseProgressLogger
      def start(max_progress)
      end
      def increment
      end
      def log(message, ex = nil)
      end
    end
  end
end
lib/backup_restore_v2/logger/cli_logger.rb (new file, 147 additions)
@@ -0,0 +1,147 @@
# frozen_string_literal: true

require "colored2"
require "tty-spinner"

module BackupRestoreV2
  module Logger
    class CliLogger < BaseLogger
      def initialize(operation)
        super()

        timestamp = Time.now.utc.strftime("%Y-%m-%dT%H%M%SZ")
        current_db = RailsMultisite::ConnectionManagement.current_db
        path = File.join(Rails.root, "log", "backups", current_db)
        FileUtils.mkdir_p(path)
        path = File.join(path, "#{operation}-#{timestamp}.log")

        @logfile = File.new(path, "w")
        log_to_stdout("Logging to #{path}")
      end

      def close
        @logfile.close
      end

      def log_step(message, with_progress: false)
        if with_progress
          logger = CliProgressLogger.new(message, self)
          begin
            yield(logger)
            logger.success
          rescue Exception
            logger.error
            raise
          end
        else
          spin(message, abort_on_error: false) { yield }
        end
        nil
      end

      def log(message, level: Logger::INFO)
        log_to_stdout(message, level)
        log_to_logfile(message, level)
      end

      def log_to_stdout(message, level = Logger::INFO)
        case level
        when Logger::INFO
          puts "INFO " + " #{message}"
        when Logger::ERROR
          puts "FAIL ".red + " #{message}"
        when Logger::WARNING
          puts "WARN ".yellow + " #{message}"
        else
          puts message
        end
      end

      def log_to_logfile(message, level = Logger::INFO)
        timestamp = Time.now.utc.iso8601

        case level
        when Logger::INFO
          @logfile.puts("[#{timestamp}] INFO: #{message}")
        when Logger::ERROR
          @logfile.puts("[#{timestamp}] ERROR: #{message}")
        when Logger::WARNING
          @logfile.puts("[#{timestamp}] WARN: #{message}")
        else
          @logfile.puts("[#{timestamp}] #{message}")
        end
      end

      private def spin(title, abort_on_error:)
        result = nil

        spinner = abort_on_error ? error_spinner : warning_spinner
        spinner.update(title: title)
        spinner.auto_spin

        begin
          result = yield
          spinner.success
        rescue Exception
          spinner.error
          raise if abort_on_error
        end

        result
      end

      private def error_spinner
        @error_spinner ||= create_spinner(show_warning_instead_of_error: false)
      end

      private def warning_spinner
        @warning_spinner ||= create_spinner(show_warning_instead_of_error: true)
      end

      private def create_spinner(show_warning_instead_of_error:)
        output = $stderr

        if output.tty? && !ENV["RM_INFO"] # special case for RubyMine
          TTY::Spinner.new(
            ":spinner :title",
            success_mark: "DONE ".green,
            error_mark: show_warning_instead_of_error ? "WARN ".yellow : "FAIL ".red,
            interval: 10,
            frames: [" ", " = ", " == ", " === ", "==== ", "=== ", "== ", "= "],
          )
        else
          DummySpinner.new(
            success_mark: "DONE".green,
            error_mark: show_warning_instead_of_error ? "WARN".yellow : "FAIL".red,
          )
        end
      end
    end

    # A very simple implementation to make the spinner work when there's no TTY
    class DummySpinner
      def initialize(format: ":title... ", success_mark: "✓", error_mark: "✘")
        @format = format
        @success_mark = success_mark
        @error_mark = error_mark
      end

      def auto_spin
        text = @title ? @format.gsub(":title", @title) : @format
        print(text)
      end

      def update(title:)
        @title = title
      end

      def success
        puts(@success_mark)
      end

      def error
        puts(@error_mark)
      end
    end
  end
end
lib/backup_restore_v2/logger/cli_progress_logger.rb (new file, 67 additions)
@@ -0,0 +1,67 @@
# frozen_string_literal: true

require "colored2"
require "ruby-progressbar"

module BackupRestoreV2
  module Logger
    class CliProgressLogger < BaseProgressLogger
      def initialize(message, logger)
        @message = message
        @logger = logger

        @progressbar =
          ProgressBar.create(
            format: " %j%% %t | %c / %C | %E",
            title: @message,
            autofinish: false,
            smoothing: 0.5,
          )
      end

      def start(max_progress)
        @progress = 0
        @max_progress = max_progress

        @progressbar.progress = @progress
        @progressbar.total = @max_progress

        log_progress
      end

      def increment
        @progress += 1
        @progressbar.increment
        log_progress if @progress % 50 == 0
      end

      def log(message, ex = nil)
        @logger.log_to_logfile(message, Logger::WARNING)
      end

      def success
        reset_current_line
        @progressbar.format = "%t | %c / %C | %E"
        @progressbar.title = "DONE ".green + " #{@message}"
        @progressbar.finish
      end

      def error
        reset_current_line
        @progressbar.format = "%t | %c / %C | %E"
        @progressbar.title = "FAIL ".red + " #{@message}"
        @progressbar.finish
      end

      private

      def log_progress
        @logger.log_to_logfile("#{@message} | #{@progress} / #{@max_progress}")
      end

      def reset_current_line
        print "\033[K" # delete the output of progressbar, because it doesn't overwrite longer lines
      end
    end
  end
end
lib/backup_restore_v2/logger/default_logger.rb (new file, 50 additions)
@@ -0,0 +1,50 @@
# frozen_string_literal: true

module BackupRestoreV2
  module Logger
    class DefaultLogger < BaseLogger
      # @param operation "backup" or "restore"
      def initialize(user_id, client_id, operation)
        super()
        @user_id = user_id
        @client_id = client_id
        @operation = operation
        @logs = []
      end

      # Events are used by the UI, so we need to publish it via MessageBus.
      def log_event(event)
        publish_log(event, create_timestamp)
      end

      def log(message, level: Logger::INFO)
        timestamp = create_timestamp
        publish_log(message, timestamp)
        save_log(message, timestamp)

        case level
        when Logger::WARNING
          @warning_count += 1
        when Logger::ERROR
          @error_count += 1
        end
      end

      private

      def publish_log(message, timestamp)
        data = { timestamp: timestamp, operation: @operation, message: message }
        MessageBus.publish(
          BackupRestoreV2::LOGS_CHANNEL,
          data,
          user_ids: [@user_id],
          client_ids: [@client_id],
        )
      end

      def save_log(message, timestamp)
        @logs << "[#{timestamp}] #{message}"
      end
    end
  end
end
lib/backup_restore_v2/logger_v2.rb (new file, 74 additions)
@@ -0,0 +1,74 @@
# frozen_string_literal: true

module BackupRestoreV2
  class LoggerV2
    def initialize
      # counters start at zero so warnings?/errors? are false until something is logged
      @warning_count = 0
      @error_count = 0

      path = "/tmp/backup.log"
      FileUtils.rm_f(path)
      @channels = [CommandlineLogChannel.new, FileLogChannel.new(path)]
    end

    def debug(message)
      log(::Logger::Severity::DEBUG, message)
    end

    def info(message)
      log(::Logger::Severity::INFO, message)
    end

    def warn(message, exception = nil)
      @warning_count += 1
      log(::Logger::Severity::WARN, message, exception)
    end

    def error(message, exception = nil)
      @error_count += 1
      log(::Logger::Severity::ERROR, message, exception)
    end

    def fatal(message, exception = nil)
      @error_count += 1
      log(::Logger::Severity::FATAL, message, exception)
    end

    def log(severity, message, exception = nil)
      @channels.each { |channel| channel.log(severity, message, exception) }
    end

    def warnings?
      @warning_count > 0
    end

    def errors?
      @error_count > 0
    end

    def event(message)
      @channels.each { |channel| channel.trigger_event(message) }
    end

    def step(message, severity: ::Logger::Severity::INFO)
      @channels.each { |channel| channel.start_step(severity, message) }
      yield
      @channels.each { |channel| channel.stop_step(severity, message) }
    end

    def step_with_progress(message, severity: ::Logger::Severity::INFO)
      progress_logger = ProgressLogger.new(message, @channels)

      begin
        yield progress_logger
        progress_logger.success
      rescue StandardError
        progress_logger.error
      end
    end

    def close
      @channels.each(&:close)
    end
  end
end
lib/backup_restore_v2/logger_v2/commandline_log_channel.rb (new file, 60 additions)
@@ -0,0 +1,60 @@
# frozen_string_literal: true

require "colored2"

module BackupRestoreV2
  class LoggerV2
    class CommandlineLogChannel
      def initialize
        @logger = ColorfulLogger.new(STDOUT, formatter: LogFormatter.new.method(:call))
      end

      def log(severity, message, exception = nil)
        @logger.log(severity, message)
        @logger.log(severity, exception) if exception
      end

      def trigger_event(message)
      end

      def start_step(severity, message)
      end

      def stop_step(severity, message)
      end

      def close
        @logger.close
      end

      def create_progress_channel(message)
        CommandlineProgressChannel.new(message)
      end

      class ColorfulLogger < ::Logger
        SEVERITY_LABELS = [
          "DEBUG",
          " INFO".blue,
          " WARN".yellow,
          "ERROR".red,
          "FATAL".red,
          " ANY",
        ].freeze

        private def format_severity(severity)
          SEVERITY_LABELS[severity]
        end
      end

      class LogFormatter < ::Logger::Formatter
        def call(severity, time, progname, msg)
          "#{severity} #{msg2str(msg)}\n"
        end

        def format_datetime(time)
          time.utc.iso8601(4)
        end
      end
    end
  end
end
lib/backup_restore_v2/logger_v2/commandline_progress_channel.rb (new file, 65 additions)
@@ -0,0 +1,65 @@
# frozen_string_literal: true

require "colored2"
require "ruby-progressbar"

module BackupRestoreV2
  class LoggerV2
    class CommandlineProgressChannel
      FORMAT_WITHOUT_PERCENTAGE = "%t | %c / %C | %E"
      FORMAT_WITH_PERCENTAGE = " %j%% #{FORMAT_WITHOUT_PERCENTAGE}"

      def initialize(message)
        @message = message

        # see https://github.com/jfelchner/ruby-progressbar/wiki/Formatting
        @progressbar =
          ::ProgressBar.create(
            format: FORMAT_WITH_PERCENTAGE,
            title: @message,
            autofinish: false,
            smoothing: 0.5,
            time: ProgressBarClockTime.new,
          )
      end

      def start(max_progress)
        @max_progress = max_progress

        @progressbar.progress = 0
        @progressbar.total = @max_progress
      end

      def increment
        @progressbar.increment
      end

      def success
        reset_current_line
        @progressbar.format = FORMAT_WITHOUT_PERCENTAGE
        @progressbar.title = " DONE".green + " #{@message}"
        @progressbar.finish
      end

      def error
        reset_current_line
        @progressbar.format = FORMAT_WITHOUT_PERCENTAGE
        @progressbar.title = " FAIL".red + " #{@message}"
        @progressbar.finish
      end

      # delete the output of progressbar, because it doesn't overwrite longer lines
      private def reset_current_line
        print "\033[K"
      end

      class ProgressBarClockTime
        # make the time calculations more accurate
        # see https://blog.dnsimple.com/2018/03/elapsed-time-with-ruby-the-right-way/
        def now
          Process.clock_gettime(Process::CLOCK_MONOTONIC)
        end
      end
    end
  end
end
lib/backup_restore_v2/logger_v2/file_log_channel.rb (new file, 51 additions)
@@ -0,0 +1,51 @@
# frozen_string_literal: true

module BackupRestoreV2
  class LoggerV2
    class FileLogChannel
      def initialize(file)
        @logger = ::Logger.new(file, formatter: LogFormatter.new.method(:call))
      end

      def log(severity, message, exception = nil)
        @logger.log(severity, message)
        @logger.log(severity, exception) if exception
      end

      def trigger_event(message)
      end

      def start_step(severity, message)
        @logger.log(severity, "#{message}...")
      end

      def stop_step(severity, message)
        @logger.log(severity, "#{message}... done")
      end

      def close
        @logger.close
      end

      def create_progress_channel(message)
        FileProgressChannel.new(message, @logger)
      end

      class LogFormatter < ::Logger::Formatter
        FORMAT = "[%s] %5s -- %s\n"

        def initialize
          super
        end

        def call(severity, time, progname, msg)
          FORMAT % [format_datetime(time), severity, msg2str(msg)]
        end

        def format_datetime(time)
          time.utc.iso8601(4)
        end
      end
    end
  end
end
lib/backup_restore_v2/logger_v2/file_progress_channel.rb (new file, 58 additions)
@@ -0,0 +1,58 @@
# frozen_string_literal: true

require "colored2"
require "ruby-progressbar"

module BackupRestoreV2
  class LoggerV2
    class FileProgressChannel
      MIN_SECONDS_BETWEEN_PROGRESS_LOGGING = 60

      def initialize(message, logger)
        @message = message
        @logger = logger
      end

      def start(max_progress)
        @progress = 0
        @max_progress = max_progress

        @logger.info("#{@message}... 0 / #{@max_progress}")

        @last_output_time = clock_time
        @last_output_percent = 0
      end

      def increment
        @progress += 1

        progress_percent = @progress * 100 / @max_progress
        current_time = clock_time

        if loggable?(progress_percent, current_time)
          @last_output_time = current_time
          @last_output_percent = progress_percent

          @logger.info("#{@message}... #{@progress} / #{@max_progress} | #{progress_percent}%")
        end
      end

      def success
        @logger.info("#{@message}... done!")
      end

      def error
        @logger.error("#{@message}... failed!")
      end

      private def clock_time
        Process.clock_gettime(Process::CLOCK_MONOTONIC)
      end

      private def loggable?(progress_percent, current_time)
        progress_percent > @last_output_percent &&
          current_time - @last_output_time > MIN_SECONDS_BETWEEN_PROGRESS_LOGGING
      end
    end
  end
end
lib/backup_restore_v2/logger_v2/progress_logger.rb (new file, 27 additions)
@@ -0,0 +1,27 @@
# frozen_string_literal: true

module BackupRestoreV2
  class LoggerV2
    class ProgressLogger
      def initialize(message, channels)
        @channels = channels.map { |c| c.create_progress_channel(message) }.compact
      end

      def start(max_progress)
        @channels.each { |c| c.start(max_progress) }
      end

      def increment
        @channels.each { |c| c.increment }
      end

      def success
        @channels.each { |c| c.success }
      end

      def error
        @channels.each { |c| c.error }
      end
    end
  end
end
lib/backup_restore_v2/operation.rb (new file, 80 additions)
@@ -0,0 +1,80 @@
# frozen_string_literal: true

module BackupRestoreV2
  class OperationRunningError < RuntimeError
  end

  class Operation
    RUNNING_KEY = "backup_restore_operation_is_running"
    ABORT_KEY = "backup_restore_operation_should_shutdown"

    def self.start
      if !Discourse.redis.set(RUNNING_KEY, "1", ex: 60, nx: true)
        raise BackupRestoreV2::OperationRunningError
      end

      @keep_running_thread = keep_running
      @abort_listener_thread = listen_for_abort_signal
      Rails.env.test? ? [@keep_running_thread, @abort_listener_thread] : true
    end

    def self.finish
      if @keep_running_thread
        @keep_running_thread.kill
        @keep_running_thread.join if @keep_running_thread.alive?
        @keep_running_thread = nil
      end

      Discourse.redis.del(RUNNING_KEY)

      if @abort_listener_thread
        @abort_listener_thread.join if @abort_listener_thread.alive?
        @abort_listener_thread = nil
      end
    end

    def self.running?
      !!Discourse.redis.get(RUNNING_KEY)
    end

    def self.abort!
      Discourse.redis.set(ABORT_KEY, "1")
    end

    def self.should_abort?
      !!Discourse.redis.get(ABORT_KEY)
    end

    private_class_method def self.keep_running
      start_thread("keep_running") do
        while true
          # extend the expiry by 1 minute every 30 seconds
          Discourse.redis.expire(RUNNING_KEY, 60.seconds)
          sleep(30.seconds)
        end
      end
    end

    private_class_method def self.listen_for_abort_signal
      Discourse.redis.del(ABORT_KEY)

      start_thread("abort_listener") do
        while running?
          exit if should_abort?
          sleep(0.1)
        end
      end
    end

    private_class_method def self.start_thread(name)
      db = RailsMultisite::ConnectionManagement.current_db

      Thread.new do
        RailsMultisite::ConnectionManagement.with_connection(db) do
          Thread.current.name = name
          yield
        end
      end
    end
  end
end
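The coordination above leans on Redis SET with NX and EX: only the first caller acquires the lock, and the 60-second TTL plus the keep_running thread (which re-extends it every 30 seconds) means a crashed process releases the lock automatically. A condensed sketch of the same pattern, with a hypothetical key name:

    # Only one process can hold the lock; a crash releases it within 60s.
    if Discourse.redis.set("my_operation_lock", "1", ex: 60, nx: true)
      Thread.new { loop { Discourse.redis.expire("my_operation_lock", 60); sleep 30 } }
    end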
@@ -777,17 +777,14 @@ module Discourse
   end

   def self.try_git(git_cmd, default_value)
-    version_value = false
-
     begin
-      version_value = `#{git_cmd}`.strip
+      value = `#{git_cmd}`.strip
     rescue StandardError
-      version_value = default_value
+      value = default_value
     end

-    version_value = default_value if version_value.empty?
-
-    version_value
+    value = default_value if value.blank?
+    value
   end

   # Either returns the site_contact_username user or the first admin.
@@ -109,6 +109,14 @@ class Plugin::Instance

   delegate :name, to: :metadata

+  def git_version
+    plugin_root = File.dirname(path)
+    git_dir = File.join(plugin_root, ".git")
+    return nil if !Dir.exist?(git_dir)
+
+    Discourse.try_git("git --git-dir #{git_dir} rev-parse HEAD", nil)
+  end
+
   def add_to_serializer(serializer, attr, define_include_method = true, &block)
     reloadable_patch do |plugin|
       base =
script/cli/commands/backup_command.rb (new file, 67 additions)
@@ -0,0 +1,67 @@
# frozen_string_literal: true

require "thor"

module DiscourseCLI
  class BackupCommand < Thor
    desc "create", "Creates a backup"
    def create
      DiscourseCLI.load_rails

      with_logger("backup") do |logger|
        backuper = BackupRestoreV2::Backuper.new(Discourse::SYSTEM_USER_ID, logger)
        backuper.run
        exit(1) unless backuper.success
      end
    end

    desc "restore FILENAME", "Restores a backup"
    def restore(filename)
    end

    desc "list", "Lists existing backups"
    def list
    end

    desc "delete", "Deletes a backup"
    def delete
    end

    desc "download", "Downloads a backup"
    def download
    end

    desc "test", "Testing stuff"
    def test
      DiscourseCLI.load_rails

      logger = BackupRestoreV2::LoggerV2.new
      logger.debug("Hello world")
      logger.info("Hello world")
      logger.warn("Hello world")
      logger.error("Hello world")
      logger.fatal("Hello world")

      logger.step_with_progress("Preparing rocket") do |progress|
        max = 1000
        progress.start(max)
        (1..max).each do |i|
          sleep(0.01)
          progress.increment
          sleep(2) if i == max
        end
      end

      logger.close
    end

    no_commands do
      private def with_logger(name)
        logger = BackupRestoreV2::Logger::CliLogger.new(name)
        yield logger
      ensure
        logger.close if logger
      end
    end
  end
end
script/disco (new executable file, 24 additions)
@@ -0,0 +1,24 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

require 'thor'
require_relative 'cli/commands/backup_command'

module DiscourseCLI
  class Main < Thor
    desc "backup SUBCOMMAND", "Create, restore and manage backups"
    subcommand "backup", BackupCommand

    def self.exit_on_failure?
      true
    end
  end

  def self.load_rails
    puts "Loading Rails..."
    Dir.chdir(File.expand_path("..", __dir__)) # rubocop:disable Discourse/NoChdir
    require File.expand_path("../config/environment", __dir__)
  end
end

DiscourseCLI::Main.start
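With the Thor wiring above, the commands register under the `backup` subcommand, so (assuming the script is executable) a backup would presumably be created from the shell via `script/disco backup create`, which loads Rails and runs the Backuper with the CLI logger.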
@@ -20,6 +20,13 @@ fork do
     ).run
   end

+  def backup_v2
+    user_id, opts = parse_params
+    logger = BackupRestoreV2::Logger::DefaultLogger.new(user_id, opts[:client_id], "backup")
+    backuper = BackupRestoreV2::Backuper.new(user_id, logger, opts)
+    backuper.run
+  end
+
   def parse_params
     user_id = ARGV[1].to_i
     opts = JSON.parse(ARGV[2], symbolize_names: true)
@@ -31,6 +38,8 @@ fork do
     backup
   when "restore"
     restore
+  when "backup_v2"
+    backup_v2
   else
     raise "Unknown argument: #{ARGV[0]}"
   end
spec/lib/backup_restore_v2/backup/database_dumper_spec.rb (new file, 72 additions)
@@ -0,0 +1,72 @@
# frozen_string_literal: true

require "rails_helper"

describe BackupRestoreV2::Backup::DatabaseDumper do
  let(:io) { StringIO.new }

  describe "#dump_public_schema" do
    it "raises an exception when the last output of pg_dump is an error" do
      dumper = described_class.new(schema: "non_existent_schema")
      expect { dumper.dump_schema_into(io) }.to raise_error(
        BackupRestoreV2::Backup::DatabaseBackupError,
      )
    end

    it "dumps the public schema by default" do
      status = mock("pg_dump status")
      status.expects(:exitstatus).returns(0).once
      Process.expects(:last_status).returns(status).once

      thread = mock("thread")
      thread.stubs(:name=)
      thread.stubs(:join)

      stdin = StringIO.new
      stdout = StringIO.new("stdout 1\nstdout 2")
      stderr = StringIO.new("stderr 1\nstderr 2")
      Open3
        .expects(:popen3)
        .with { |_env, *command| command.include?("--schema=public") }
        .yields(stdin, stdout, stderr, thread)
        .once

      dumper = described_class.new
      dumper.dump_schema_into(io)

      expect(io.string).to eq(stdout.string)
      expect(dumper.log_lines).to eq(stderr.string.split("\n"))
    end

    context "with real pg_dump" do
      # before(:context) and after(:context) runs outside of transaction
      # rubocop:disable RSpec/BeforeAfterAll
      before(:context) { DB.exec(<<~SQL) }
        CREATE SCHEMA backup_test;

        CREATE TABLE backup_test.foo
        (
          id integer NOT NULL
        );

        CREATE VIEW backup_test.pg_stat_statements AS
        SELECT * FROM backup_test.foo;

        ALTER TABLE backup_test.pg_stat_statements OWNER TO postgres;
      SQL

      after(:context) { DB.exec("DROP SCHEMA IF EXISTS backup_test CASCADE") }
      # rubocop:enable RSpec/BeforeAfterAll

      it "successfully dumps a database schema into a gzipped stream" do
        dumper = described_class.new(schema: "backup_test")
        dumper.dump_schema_into(io)

        db_dump = Zlib.gunzip(io.string)

        expect(db_dump).to include("CREATE TABLE backup_test.foo")
        expect(db_dump).to_not include("CREATE VIEW backup_test.pg_stat_statements")
      end
    end
  end
end
196
spec/lib/backup_restore_v2/backup/metadata_writer_spec.rb
Normal file
@ -0,0 +1,196 @@
# frozen_string_literal: true
# rubocop:disable Discourse/OnlyTopLevelMultisiteSpecs

require "rails_helper"

describe BackupRestoreV2::Backup::MetadataWriter do
  subject { described_class.new(backup_uploads_result, backup_optimized_images_result) }
  let(:backup_uploads_result) { nil }
  let(:backup_optimized_images_result) { nil }
  let(:io) { StringIO.new }

  before do
    BackupRestoreV2::Database.stubs(:current_core_migration_version).returns(20_220_926_152_703)
    Discourse.stubs(:git_version).returns("c0924f0cae1264ed1d00dda3f6c5417cdb750cf0")
    Discourse.stubs(:git_branch).returns("main")
    Discourse.stubs(:base_url).returns("https://discourse.example.com")
    Discourse.stubs(:asset_host).returns("https://cdn.example.com/foo")
    Discourse.stubs(:plugins).returns([])
    Discourse.stubs(:hidden_plugins).returns([])
  end

  describe "#estimated_file_size" do
    it "adds 1 kilobyte to the actual file size" do
      subject.write_into(io)
      current_size = io.string.bytesize

      expect(current_size).to be > 256
      expect(subject.estimated_file_size).to eq(current_size + 1024)
    end
  end

  describe "#write_into" do
    def expect_metadata(expected_data_overrides = {})
      subject.write_into(io)
      expect(io.string).to be_present

      expected_data = {
        backup_format: 2,
        discourse_version: Discourse::VERSION::STRING,
        db_version: 20_220_926_152_703,
        git_version: "c0924f0cae1264ed1d00dda3f6c5417cdb750cf0",
        git_branch: "main",
        base_url: "https://discourse.example.com",
        cdn_url: "https://cdn.example.com/foo",
        s3_base_url: nil,
        s3_cdn_url: nil,
        db_name: "default",
        multisite: false,
        uploads: nil,
        optimized_images: nil,
        plugins: [],
      }.deep_merge(expected_data_overrides)

      data = JSON.parse(io.string, symbolize_names: true)
      expect(data).to eq(expected_data)
    end

    context "without uploads" do
      it "writes the correct metadata" do
        expect_metadata
      end
    end

    context "with uploads and optimized images" do
      let(:backup_uploads_result) do
        BackupRestoreV2::Backup::UploadStats.new(
          total_count: 83_829,
          included_count: 83_827,
          missing_count: 2,
        )
      end
      let(:backup_optimized_images_result) do
        BackupRestoreV2::Backup::UploadStats.new(
          total_count: 251_487,
          included_count: 251_481,
          missing_count: 6,
        )
      end

      it "writes the correct metadata" do
        expect_metadata(
          uploads: {
            total_count: 83_829,
            included_count: 83_827,
            missing_count: 2,
          },
          optimized_images: {
            total_count: 251_487,
            included_count: 251_481,
            missing_count: 6,
          },
        )
      end
    end

    context "with multisite", type: :multisite do
      it "writes the correct metadata" do
        test_multisite_connection("second") { expect_metadata(db_name: "second", multisite: true) }
      end
    end

    context "with S3 enabled" do
      before do
        setup_s3
        SiteSetting.s3_cdn_url = "https://s3.cdn.com"
      end

      it "writes the correct metadata" do
        expect_metadata(
          s3_base_url: "//s3-upload-bucket.s3.dualstack.us-west-1.amazonaws.com",
          s3_cdn_url: "https://s3.cdn.com",
        )
      end
    end

    context "with plugins" do
      def create_plugin(name, enabled:, git_version: nil, db_version: 0)
        metadata = Plugin::Metadata.new
        metadata.name = name

        normalized_name = name.underscore
        enabled_setting_name = "plugin_#{normalized_name}_enabled"
        SiteSetting.setting(enabled_setting_name.to_sym, enabled)

        instance = Plugin::Instance.new(metadata, "/tmp/#{normalized_name}/plugin.rb")
        instance.enabled_site_setting(enabled_setting_name)
        instance.stubs(:git_version).returns(git_version)

        BackupRestoreV2::Database
          .stubs(:current_plugin_migration_version)
          .with(instance)
          .returns(db_version)

        instance
      end

      before do
        visible_plugins = [
          create_plugin("discourse-solved", enabled: true),
          create_plugin(
            "discourse-chat",
            enabled: true,
            git_version: "28819613b9aa46b3f80b22fc381c1a3d92de9785",
            db_version: 20_220_901_034_107,
          ),
          create_plugin(
            "discourse-reactions",
            enabled: true,
            git_version: "301918a6505a89ae45481636987d9f5988d98805",
          ),
          create_plugin("discourse-math", enabled: false),
          create_plugin(
            "discourse-encrypt",
            enabled: false,
            git_version: "82ea2c31d59fd4f8c7275f796a2a10548857a7fa",
            db_version: 20_201_027_233_335,
          ),
        ]
        hidden_plugins = [
          create_plugin("poll", enabled: true),
          create_plugin("styleguide", enabled: false),
        ]
        all_plugins = visible_plugins + hidden_plugins
        Discourse.stubs(:plugins).returns(all_plugins)
        Discourse.stubs(:hidden_plugins).returns(hidden_plugins)
      end

      it "includes only visible plugins in metadata" do
        expect_metadata(
          plugins: [
            {
              name: "discourse-chat",
              enabled: true,
              db_version: 20_220_901_034_107,
              git_version: "28819613b9aa46b3f80b22fc381c1a3d92de9785",
            },
            {
              name: "discourse-encrypt",
              enabled: false,
              db_version: 20_201_027_233_335,
              git_version: "82ea2c31d59fd4f8c7275f796a2a10548857a7fa",
            },
            { name: "discourse-math", enabled: false, db_version: 0, git_version: nil },
            {
              name: "discourse-reactions",
              enabled: true,
              db_version: 0,
              git_version: "301918a6505a89ae45481636987d9f5988d98805",
            },
            { name: "discourse-solved", enabled: true, db_version: 0, git_version: nil },
          ],
        )
      end
    end
  end
end
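A note on the 1 KiB padding asserted above: the backuper (see backuper_spec.rb below) reserves a fixed-size tar placeholder for meta.json before the archive is finished, so the writer apparently over-estimates to guarantee the final JSON still fits into the reserved slot. The idea, as an illustrative sketch (`metadata_hash` is a hypothetical name):

# Serialize once to measure, then add slack so the document still
# fits in the pre-sized tar entry if values grow slightly.
json = metadata_hash.to_json
estimated_file_size = json.bytesize + 1024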
259
spec/lib/backup_restore_v2/backup/upload_backuper_spec.rb
Normal file
@ -0,0 +1,259 @@
# frozen_string_literal: true

require "rails_helper"
require "rubygems/package"

describe BackupRestoreV2::Backup::UploadBackuper do
  before { SiteSetting.authorized_extensions = "png|pdf" }

  def initialize_fake_s3
    setup_s3
    @fake_s3 = FakeS3.create
  end

  def create_uploads(fixtures)
    uploads =
      fixtures.map do |filename, file|
        upload = UploadCreator.new(file, filename).create_for(Discourse::SYSTEM_USER_ID)
        raise "invalid upload" if upload.errors.present?
        upload
      end

    paths = uploads.map { |upload| "original/1X/#{upload.sha1}.#{upload.extension}" }
    files = fixtures.values.map { |file| File.binread(file.path) }
    [paths, files]
  end

  def create_optimized_images(fixtures)
    store = Discourse.store

    fixtures
      .map do |filename, file|
        upload = UploadCreator.new(file, filename).create_for(Discourse::SYSTEM_USER_ID)
        raise "invalid upload" if upload.errors.present?

        optimized_image = OptimizedImage.create_for(upload, 10, 10)
        prefixed_path = store.get_path_for_optimized_image(optimized_image)
        path = prefixed_path.delete_prefix("/#{store.upload_path}/")

        file =
          if SiteSetting.enable_s3_uploads
            bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
            bucket.find_object(path)[:body]
          else
            File.binread(File.join(store.public_dir, prefixed_path))
          end

        [path, file]
      end
      .transpose
  end

  def decompress(io)
    paths = []
    files = []

    Zlib::GzipReader.wrap(StringIO.new(io.string)) do |gz|
      Gem::Package::TarReader.new(gz) do |tar|
        tar.each do |entry|
          paths << entry.full_name
          files << entry.read
        end
      end
    end

    [paths, files]
  end

  describe ".include_uploads?" do
    context "without uploads by users" do
      it "always returns false" do
        expect(described_class.include_uploads?).to eq(false)
      end
    end

    context "with local uploads by users" do
      before { Fabricate(:upload) }

      it "always returns true" do
        SiteSetting.include_s3_uploads_in_backups = false
        expect(described_class.include_uploads?).to eq(true)

        SiteSetting.include_s3_uploads_in_backups = true
        expect(described_class.include_uploads?).to eq(true)
      end
    end

    context "with uploads by users stored on S3" do
      before do
        initialize_fake_s3
        Fabricate(:upload_s3)
      end

      it "returns true when include_s3_uploads_in_backups is enabled" do
        SiteSetting.include_s3_uploads_in_backups = true
        expect(described_class.include_uploads?).to eq(true)
      end

      it "returns false when include_s3_uploads_in_backups is disabled" do
        SiteSetting.include_s3_uploads_in_backups = false
        expect(described_class.include_uploads?).to eq(false)
      end
    end
  end

  describe ".include_optimized_images?" do
    context "without uploads by users" do
      it "always returns false" do
        SiteSetting.include_thumbnails_in_backups = true
        expect(described_class.include_optimized_images?).to eq(false)

        SiteSetting.include_thumbnails_in_backups = false
        expect(described_class.include_optimized_images?).to eq(false)
      end
    end

    context "with uploads by users" do
      before { Fabricate(:optimized_image) }

      it "returns true when include_thumbnails_in_backups is enabled" do
        SiteSetting.include_thumbnails_in_backups = true
        expect(described_class.include_optimized_images?).to eq(true)
      end

      it "returns false when include_thumbnails_in_backups is disabled" do
        SiteSetting.include_thumbnails_in_backups = false
        expect(described_class.include_optimized_images?).to eq(false)
      end
    end
  end

  describe "#compress_uploads_into" do
    before { @tmp_directory = Dir.mktmpdir }
    after { FileUtils.rm_rf(@tmp_directory) }
    subject { described_class.new(@tmp_directory, BackupRestoreV2::Logger::BaseProgressLogger.new) }

    shared_examples "compression and error logging" do
      it "compresses existing files and logs missing files" do
        io = StringIO.new
        _missing_upload1 = Fabricate(upload_type)

        upload_paths, uploaded_files =
          create_uploads(
            "smallest.png" => file_from_fixtures("smallest.png"),
            "small.pdf" => file_from_fixtures("small.pdf", "pdf"),
          )

        _missing_upload2 = Fabricate(upload_type)
        _missing_upload3 = Fabricate(upload_type)

        result = subject.compress_uploads_into(io)
        decompressed_paths, decompressed_files = decompress(io)

        expect(decompressed_paths).to eq(upload_paths)
        expect(decompressed_files).to eq(uploaded_files)
        expect(result).to be_a(BackupRestoreV2::Backup::UploadStats)
        expect(result.total_count).to eq(5)
        expect(result.included_count).to eq(2)
        expect(result.missing_count).to eq(3)
      end
    end

    context "with local uploads" do
      let!(:upload_type) { :upload }

      include_examples "compression and error logging"
    end

    context "with S3 uploads" do
      before { initialize_fake_s3 }

      let!(:upload_type) { :upload_s3 }

      include_examples "compression and error logging"
    end

    context "with mixed uploads" do
      it "compresses existing files and logs missing files" do
        local_upload_paths, local_uploaded_files =
          create_uploads("smallest.png" => file_from_fixtures("smallest.png"))
        initialize_fake_s3
        s3_upload_paths, s3_uploaded_files =
          create_uploads("small.pdf" => file_from_fixtures("small.pdf", "pdf"))
        upload_paths = local_upload_paths + s3_upload_paths
        uploaded_files = local_uploaded_files + s3_uploaded_files

        io = StringIO.new
        result = subject.compress_uploads_into(io)
        decompressed_paths, decompressed_files = decompress(io)

        expect(decompressed_paths).to eq(upload_paths)
        expect(decompressed_files).to eq(uploaded_files)
        expect(result).to be_a(BackupRestoreV2::Backup::UploadStats)
        expect(result.total_count).to eq(2)
        expect(result.included_count).to eq(2)
        expect(result.missing_count).to eq(0)

        SiteSetting.enable_s3_uploads = false
        io = StringIO.new
        result = subject.compress_uploads_into(io)
        decompressed_paths, decompressed_files = decompress(io)

        expect(decompressed_paths).to eq(upload_paths)
        expect(decompressed_files).to eq(uploaded_files)
        expect(result).to be_a(BackupRestoreV2::Backup::UploadStats)
        expect(result.total_count).to eq(2)
        expect(result.included_count).to eq(2)
        expect(result.missing_count).to eq(0)
      end
    end
  end

  describe "#compress_optimized_images_into" do
    subject { described_class.new(Dir.mktmpdir, BackupRestoreV2::Logger::BaseProgressLogger.new) }

    it "includes optimized images stored locally" do
      _missing_image1 = Fabricate(:optimized_image)

      optimized_paths, optimized_files =
        create_optimized_images(
          "smallest.png" => file_from_fixtures("smallest.png"),
          "logo.png" => file_from_fixtures("logo.png"),
        )

      _missing_image2 = Fabricate(:optimized_image)
      _missing_image3 = Fabricate(:optimized_image)

      io = StringIO.new
      result = subject.compress_optimized_images_into(io)
      decompressed_paths, decompressed_files = decompress(io)

      expect(decompressed_paths).to eq(optimized_paths)
      expect(decompressed_files).to eq(optimized_files)
      expect(result).to be_a(BackupRestoreV2::Backup::UploadStats)
      expect(result.total_count).to eq(5)
      expect(result.included_count).to eq(2)
      expect(result.missing_count).to eq(3)
    end

    it "doesn't include optimized images stored on S3" do
      initialize_fake_s3

      create_optimized_images(
        "smallest.png" => file_from_fixtures("smallest.png"),
        "logo.png" => file_from_fixtures("logo.png"),
      )

      io = StringIO.new
      result = subject.compress_optimized_images_into(io)
      decompressed_paths, decompressed_files = decompress(io)

      expect(decompressed_paths).to be_blank
      expect(decompressed_files).to be_blank
      expect(result).to be_a(BackupRestoreV2::Backup::UploadStats)
      expect(result.total_count).to eq(2)
      expect(result.included_count).to eq(0)
      expect(result.missing_count).to eq(0)
    end
  end
end
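The `decompress` helper above reads back exactly what the backuper emits: a gzipped tar stream of upload files. For orientation, this is the stdlib mirror image of that helper, writing a path-to-content map into a .tar.gz; the real `UploadBackuper` additionally streams from the local or S3 store and tallies `UploadStats`, which this illustrative sketch omits:

require "rubygems/package"
require "zlib"

# Sketch: write { path => content } pairs into a gzipped tar stream.
def compress_into(io, files)
  Zlib::GzipWriter.wrap(io) do |gz|
    Gem::Package::TarWriter.new(gz) do |tar|
      files.each do |path, content|
        tar.add_file_simple(path, 0644, content.bytesize) { |entry| entry.write(content) }
      end
    end
  end
end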
152
spec/lib/backup_restore_v2/backuper_spec.rb
Normal file
@ -0,0 +1,152 @@
# frozen_string_literal: true

require "rails_helper"

describe BackupRestoreV2::Backuper do
  fab!(:admin) { Fabricate(:admin) }
  let!(:logger) do
    Class
      .new(BackupRestoreV2::Logger::BaseLogger) do
        def log(message, level: nil)
          @logs << message
        end

        def log_event(event)
          @logs << event
        end
      end
      .new
  end

  subject { described_class.new(admin.id, logger) }

  def execute_failed_backup
    BackupRestoreV2::Operation.stubs(:start).raises(BackupRestoreV2::OperationRunningError)
    subject.run
  end

  def execute_successful_backup(site_name: "discourse")
    freeze_time(Time.parse("2021-03-24T20:27:31Z"))

    tar_writer = mock("tar_writer")
    expect_tar_creation(tar_writer, site_name)
    expect_db_dump_added_to_tar(tar_writer)
    expect_uploads_added_to_tar(tar_writer)
    expect_optimized_images_added_to_tar(tar_writer)
    expect_metadata_added_to_tar(tar_writer)

    subject.run
  end

  def expect_tar_creation(tar_writer, site_name)
    current_db = RailsMultisite::ConnectionManagement.current_db
    filename =
      File.join(Rails.root, "public", "backups", current_db, "#{site_name}-2021-03-24T202731Z.tar")

    MiniTarball::Writer.expects(:create).with(filename).yields(tar_writer).once
  end

  def expect_db_dump_added_to_tar(tar_writer)
    output_stream = mock("db_dump_output_stream")

    BackupRestoreV2::Backup::DatabaseDumper
      .any_instance
      .expects(:dump_schema_into)
      .with(output_stream)
      .once

    tar_writer
      .expects(:add_file_from_stream)
      .with(has_entry(name: "dump.sql.gz"))
      .yields(output_stream)
      .once
  end

  def expect_uploads_added_to_tar(tar_writer)
    output_stream = mock("uploads_stream")

    BackupRestoreV2::Backup::UploadBackuper
      .any_instance
      .expects(:compress_uploads_into)
      .with(output_stream)
      .returns(BackupRestoreV2::Backup::UploadStats.new(total_count: 42))
      .once

    BackupRestoreV2::Backup::UploadBackuper.expects(:include_uploads?).returns(true).once

    tar_writer
      .expects(:add_file_from_stream)
      .with(has_entry(name: "uploads.tar.gz"))
      .yields(output_stream)
      .once
  end

  def expect_optimized_images_added_to_tar(tar_writer)
    output_stream = mock("optimized_images_stream")

    BackupRestoreV2::Backup::UploadBackuper
      .any_instance
      .expects(:compress_optimized_images_into)
      .with(output_stream)
      .returns(BackupRestoreV2::Backup::UploadStats.new(total_count: 42))
      .once

    BackupRestoreV2::Backup::UploadBackuper.expects(:include_optimized_images?).returns(true).once

    tar_writer
      .expects(:add_file_from_stream)
      .with(has_entry(name: "optimized-images.tar.gz"))
      .yields(output_stream)
      .once
  end

  def expect_metadata_added_to_tar(tar_writer)
    output_stream = mock("metadata_stream")

    BackupRestoreV2::Backup::MetadataWriter
      .any_instance
      .expects(:estimated_file_size)
      .returns(417)
      .once

    BackupRestoreV2::Backup::MetadataWriter
      .any_instance
      .expects(:write_into)
      .with(output_stream)
      .once

    tar_writer
      .expects(:add_file_placeholder)
      .with(has_entries(name: "meta.json", file_size: 417))
      .returns(1)
      .once

    tar_writer.expects(:with_placeholder).with(1).yields(tar_writer).once

    tar_writer
      .expects(:add_file_from_stream)
      .with(has_entry(name: "meta.json"))
      .yields(output_stream)
      .once
  end

  it "successfully creates a backup" do
    execute_successful_backup
  end

  context "with logging for UI" do
    it "logs events for successful backup" do
      execute_successful_backup

      expect(logger.logs.first).to eq("[STARTED]")
      expect(logger.logs.last).to eq("[SUCCESS]")
    end

    it "logs events for failed backup" do
      execute_failed_backup

      expect(logger.logs.first).to eq("[STARTED]")
      expect(logger.logs.last).to eq("[FAILED]")
    end
  end
end
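One detail worth calling out: meta.json is written through a placeholder. Tar headers record a file's size up front, so the backuper reserves a slot sized by `estimated_file_size`, streams the rest of the archive, and only then fills the slot. In outline, as exercised by the mocks above (the real MiniTarball API may differ in detail):

MiniTarball::Writer.create(filename) do |writer|
  placeholder = writer.add_file_placeholder(name: "meta.json", file_size: estimated_size)

  writer.add_file_from_stream(name: "dump.sql.gz") { |io| dumper.dump_schema_into(io) }
  writer.add_file_from_stream(name: "uploads.tar.gz") { |io| backuper.compress_uploads_into(io) }

  writer.with_placeholder(placeholder) do |w|
    w.add_file_from_stream(name: "meta.json") { |io| metadata_writer.write_into(io) }
  end
end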
88
spec/lib/backup_restore_v2/database_spec.rb
Normal file
@ -0,0 +1,88 @@
# frozen_string_literal: true

require "rails_helper"

describe BackupRestoreV2::Database do
  def absolute_path(path)
    Rails.root.join(path).to_s
  end

  describe ".database_configuration" do
    it "returns a DatabaseConfiguration" do
      expect(described_class.database_configuration).to be_a(BackupRestoreV2::DatabaseConfiguration)
    end
  end

  describe ".core_migration_files" do
    it "returns regular and post_migrate migrations" do
      files = described_class.core_migration_files
      expect(files).to include(absolute_path("db/migrate/20120311201341_create_forums.rb"))
      expect(files).to include(
        absolute_path(
          "db/post_migrate/20210528003603_fix_badge_image_avatar_upload_security_and_acls.rb",
        ),
      )
    end

    it "doesn't return plugin migrations" do
      files = described_class.core_migration_files
      expect(files).to_not include(
        absolute_path("plugins/poll/db/migrate/20180820073549_create_polls_tables.rb"),
      )
    end
  end

  describe ".current_core_migration_version" do
    it "returns 0 if there are no schema migrations" do
      ActiveRecord::SchemaMigration.stubs(:table_exists?).returns(false)
      expect(described_class.current_core_migration_version).to eq(0)
    end

    it "returns the max schema migration version" do
      ActiveRecord::SchemaMigration.where("version > '20130213203300'").delete_all
      expect(described_class.current_core_migration_version).to eq(20_130_213_203_300)
    end

    it "returns the max schema migration version from a post migration" do
      ActiveRecord::SchemaMigration.where("version > '20211201221028'").delete_all
      expect(described_class.current_core_migration_version).to eq(20_211_201_221_028)
    end

    it "doesn't return version numbers from plugins" do
      ActiveRecord::SchemaMigration.where("version > '20180820073549'").delete_all

      # Make sure that the migration from the poll plugin exists.
      # It might be missing if the DB was migrated without plugin migrations.
      if !ActiveRecord::SchemaMigration.where(version: "20180820073549").exists?
        ActiveRecord::SchemaMigration.create!(version: "20180820073549")
      end

      expect(described_class.current_core_migration_version).to eq(20_180_813_074_843)
    end
  end

  describe ".current_plugin_migration_version" do
    let(:plugin) do
      metadata = Plugin::Metadata.new
      metadata.name = "poll"
      Plugin::Instance.new(metadata, absolute_path("plugins/poll/plugin.rb"))
    end

    it "returns 0 if there are no schema migrations" do
      ActiveRecord::SchemaMigration.stubs(:table_exists?).returns(false)
      expect(described_class.current_plugin_migration_version(plugin)).to eq(0)
    end

    it "returns the max schema migration version" do
      ActiveRecord::SchemaMigration.where("version > '20220101010000'").delete_all

      # Make sure that the migration from the poll plugin exists.
      # It might be missing if the DB was migrated without plugin migrations.
      if !ActiveRecord::SchemaMigration.where(version: "20200804144550").exists?
        ActiveRecord::SchemaMigration.create!(version: "20200804144550")
      end

      expect(described_class.current_plugin_migration_version(plugin)).to eq(20_200_804_144_550)
    end
  end
end
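Taken together, these expectations suggest `current_core_migration_version` is the highest applied `schema_migrations` version that corresponds to a core (non-plugin) migration file. A hedged sketch of that logic; the actual implementation may differ:

# Sketch: intersect applied versions with versions parsed from core
# migration filenames, then take the maximum.
def current_core_migration_version
  return 0 unless ActiveRecord::SchemaMigration.table_exists?

  core_versions = core_migration_files.map { |f| File.basename(f)[/\A\d+/].to_i }
  applied = ActiveRecord::SchemaMigration.pluck(:version).map(&:to_i)
  (applied & core_versions).max || 0
end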
179
spec/lib/backup_restore_v2/logger/default_logger_spec.rb
Normal file
@ -0,0 +1,179 @@
# frozen_string_literal: true

require "rails_helper"

describe BackupRestoreV2::Logger::DefaultLogger do
  fab!(:admin) { Fabricate(:admin) }
  let(:operation) { "backup" }
  let(:client_id) { 42 }
  subject { described_class.new(admin.id, client_id, operation) }

  def expect_publish_on_message_bus(*messages)
    published_messages = freeze_time("2022-10-13T21:20:17Z") { MessageBus.track_publish { yield } }
    expect(published_messages.size).to eq(messages.size)

    messages.each_with_index do |message, index|
      published_message = published_messages[index]

      expected_attributes = {
        channel: BackupRestoreV2::LOGS_CHANNEL,
        user_ids: [admin.id],
        client_ids: [client_id],
      }
      expect(published_message).to have_attributes(expected_attributes)

      expected_data = { operation: "backup", message: message, timestamp: "2022-10-13 21:20:17" }
      expect(published_message.data).to include(expected_data)
    end
  end

  describe "#log_event" do
    it "publishes a message" do
      expect_publish_on_message_bus("Foo") { subject.log_event("Foo") }
    end

    it "doesn't output the event as log entry" do
      subject.log_event("Foo")
      expect(subject.logs).to be_empty
    end
  end

  describe "#log_step" do
    context "without progress" do
      it "logs the step name" do
        freeze_time("2022-10-13T21:20:17Z")
        subject.log_step("Step 1: Launch") {}
        expect(subject.logs).to eq(["[2022-10-13 21:20:17] Step 1: Launch"])
      end

      it "publishes a message" do
        expect_publish_on_message_bus("Step 2: Main Engine Cut Off") do
          subject.log_step("Step 2: Main Engine Cut Off") {}
        end
      end

      it "yields control" do
        expect { |block|
          subject.log_step("Step 3: Payload Separation", &block)
        }.to yield_with_no_args
      end
    end

    context "with progress" do
      it "logs the step name" do
        freeze_time("2022-10-13T21:20:17Z")
        subject.log_step("Step 4: Orbital Checkouts", with_progress: true) {}
        expect(subject.logs).to eq(["[2022-10-13 21:20:17] Step 4: Orbital Checkouts"])
      end

      it "publishes a message" do
        expect_publish_on_message_bus("Step 5: Fly-Under") do
          subject.log_step("Step 5: Fly-Under", with_progress: true) {}
        end
      end

      it "yields control" do
        expect { |block|
          subject.log_step("Step 6: Rendezvous", with_progress: true, &block)
        }.to yield_with_args(BackupRestoreV2::Logger::BaseProgressLogger)
      end
    end
  end

  describe "#log" do
    it "publishes a message" do
      expect_publish_on_message_bus("Foo bar") { subject.log("Foo bar") }
    end

    it "stores log entries" do
      freeze_time("2022-10-13T21:20:17Z") { subject.log("This is an info.") }

      freeze_time("2022-10-13T21:22:49Z") do
        subject.log("This is another info.", level: BackupRestoreV2::Logger::INFO)
        subject.log("This is a warning.", level: BackupRestoreV2::Logger::WARNING)
      end

      freeze_time("2022-10-13T21:23:38Z") do
        subject.log("This is an error.", level: BackupRestoreV2::Logger::ERROR)
      end

      expect(subject.logs).to eq(
        [
          "[2022-10-13 21:20:17] INFO This is an info.",
          "[2022-10-13 21:22:49] INFO This is another info.",
          "[2022-10-13 21:22:49] WARN This is a warning.",
          "[2022-10-13 21:23:38] ERROR This is an error.",
        ],
      )
    end
  end

  describe "#log_warning" do
    it "sets the warnings? flag" do
      expect(subject.warnings?).to eq(false)

      subject.log_error("Error")
      expect(subject.warnings?).to eq(false)

      subject.log_warning("Warning")
      expect(subject.warnings?).to eq(true)
    end
  end

  describe "#log_error" do
    it "sets the errors? flag" do
      expect(subject.errors?).to eq(false)

      subject.log_warning("Warning")
      expect(subject.errors?).to eq(false)

      subject.log_error("Error")
      expect(subject.errors?).to eq(true)
    end
  end

  describe "#warnings?" do
    it "returns true when warnings have been logged with `#log_warning`" do
      expect(subject.warnings?).to eq(false)
      subject.log_warning("Foo")
      expect(subject.warnings?).to eq(true)
    end

    it "returns true when warnings have been logged with `#log`" do
      expect(subject.warnings?).to eq(false)

      subject.log("Foo")
      expect(subject.warnings?).to eq(false)

      subject.log("Error", level: BackupRestoreV2::Logger::ERROR)
      expect(subject.warnings?).to eq(false)

      subject.log("Warning", level: BackupRestoreV2::Logger::WARNING)
      expect(subject.warnings?).to eq(true)
    end
  end

  describe "#errors?" do
    it "returns true when errors have been logged with `#log_error`" do
      expect(subject.errors?).to eq(false)
      subject.log_error("Foo")
      expect(subject.errors?).to eq(true)
    end

    it "returns true when errors have been logged with `#log`" do
      expect(subject.errors?).to eq(false)

      subject.log_warning("Warning")
      expect(subject.errors?).to eq(false)

      subject.log("Foo")
      expect(subject.errors?).to eq(false)

      subject.log("Warning", level: BackupRestoreV2::Logger::WARNING)
      expect(subject.errors?).to eq(false)

      subject.log("Error", level: BackupRestoreV2::Logger::ERROR)
      expect(subject.errors?).to eq(true)
    end
  end
end
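From these assertions, the UI-facing format is `[YYYY-MM-DD HH:MM:SS] LEVEL message`, with INFO implied when no level is given and step names logged without a level word. A sketch of the formatting rule (constant and method names here are illustrative, not the actual implementation):

LEVEL_NAMES = { info: "INFO", warning: "WARN", error: "ERROR" }

def format_log_line(message, level: :info, time: Time.now.utc)
  "[#{time.strftime("%Y-%m-%d %H:%M:%S")}] #{LEVEL_NAMES.fetch(level)} #{message}"
end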
@ -0,0 +1,76 @@
# frozen_string_literal: true

require "rails_helper"

describe BackupRestoreV2::LoggerV2::FileLogChannel do
  let(:logfile) { StringIO.new }
  subject { described_class.new(logfile) }

  describe "#log" do
    it "logs all log levels" do
      freeze_time("2022-10-13T21:20:17Z")

      subject.log(::Logger::Severity::DEBUG, "debug message")
      subject.log(::Logger::Severity::INFO, "info message")
      subject.log(::Logger::Severity::WARN, "warn message")
      subject.log(::Logger::Severity::ERROR, "error message")
      subject.log(::Logger::Severity::FATAL, "fatal message")
      subject.log(::Logger::Severity::UNKNOWN, "unknown message")

      expect(logfile.string).to eq <<~TEXT
        [2022-10-13T21:20:17.0000Z] DEBUG -- debug message
        [2022-10-13T21:20:17.0000Z] INFO -- info message
        [2022-10-13T21:20:17.0000Z] WARN -- warn message
        [2022-10-13T21:20:17.0000Z] ERROR -- error message
        [2022-10-13T21:20:17.0000Z] FATAL -- fatal message
        [2022-10-13T21:20:17.0000Z] ANY -- unknown message
      TEXT
    end

    it "logs the correct timestamp" do
      freeze_time("2022-10-13T21:20:17.0000Z")
      subject.log(::Logger::Severity::INFO, "info message")

      freeze_time("2022-10-13T21:20:17.0028Z")
      subject.log(::Logger::Severity::INFO, "info message")

      expect(logfile.string).to eq <<~TEXT
        [2022-10-13T21:20:17.0000Z] INFO -- info message
        [2022-10-13T21:20:17.0028Z] INFO -- info message
      TEXT
    end

    context "with exception" do
      let(:ex) do
        raise "Foo"
      rescue => e
        e
      end
      let(:backtrace) { ex.backtrace.join("\n") }

      it "logs exceptions" do
        freeze_time("2022-10-13T21:20:17Z")

        subject.log(::Logger::Severity::INFO, "info message", ex)
        subject.log(::Logger::Severity::ERROR, "error message", ex)

        expect(logfile.string).to eq <<~TEXT
          [2022-10-13T21:20:17.0000Z] INFO -- info message
          [2022-10-13T21:20:17.0000Z] INFO -- Foo (RuntimeError)
          #{backtrace}
          [2022-10-13T21:20:17.0000Z] ERROR -- error message
          [2022-10-13T21:20:17.0000Z] ERROR -- Foo (RuntimeError)
          #{backtrace}
        TEXT
      end
    end
  end

  describe "#close" do
    it "closes the file" do
      expect(logfile.closed?).to eq(false)
      subject.close
      expect(logfile.closed?).to eq(true)
    end
  end
end
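The expected `ANY` label for `Logger::Severity::UNKNOWN` matches Ruby's stdlib severity table, so the channel can derive its labels directly from `::Logger::SEV_LABEL`. One line of the asserted format, sketched with illustrative names:

require "logger"

# ::Logger::SEV_LABEL is %w[DEBUG INFO WARN ERROR FATAL ANY].
def format_line(severity, message, time: Time.now.utc)
  label = ::Logger::SEV_LABEL[severity] || "ANY"
  "[#{time.strftime("%Y-%m-%dT%H:%M:%S.%4NZ")}] #{label} -- #{message}"
end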
75
spec/lib/backup_restore_v2/operation_spec.rb
Normal file
@ -0,0 +1,75 @@
# frozen_string_literal: true
# rubocop:disable Discourse/OnlyTopLevelMultisiteSpecs

require "rails_helper"

describe BackupRestoreV2::Operation do
  before do
    Discourse.redis.del(described_class::RUNNING_KEY)
    Discourse.redis.del(described_class::ABORT_KEY)
  end

  it "successfully marks operation as running and finished" do
    expect(described_class.running?).to eq(false)

    described_class.start
    expect(described_class.running?).to eq(true)

    expect { described_class.start }.to raise_error(BackupRestoreV2::OperationRunningError)

    described_class.finish
    expect(described_class.running?).to eq(false)
  end

  it "doesn't leave 🧟 threads running" do
    threads = described_class.start
    expect(threads.size).to eq(2)
    threads.each { |t| expect(t.status).to be_truthy }

    described_class.finish
    threads.each { |t| expect(t.status).to be_falsey }
  end

  it "exits the process when abort signal is set" do
    threads = described_class.start

    expect do
      described_class.abort!
      threads.each do |thread|
        thread.join(5)
        thread.kill
      end
    end.to raise_error(SystemExit)

    described_class.finish
    threads.each { |t| expect(t.status).to be_falsey }
  end

  it "clears an existing abort signal before it starts" do
    described_class.abort!
    expect(described_class.should_abort?).to eq(true)

    described_class.start
    expect(described_class.should_abort?).to eq(false)
    described_class.finish
  end

  context "with multisite", type: :multisite do
    it "uses the correct Redis namespace" do
      test_multisite_connection("second") do
        threads = described_class.start

        expect do
          described_class.abort!
          threads.each do |thread|
            thread.join(5)
            thread.kill
          end
        end.to raise_error(SystemExit)

        described_class.finish
        threads.each { |t| expect(t.status).to be_falsey }
      end
    end
  end
end
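The behaviour exercised here (one running operation per site, an abort flag checked by watchdog threads, per-site keys under multisite) is a Redis lock-and-flag pattern. A speculative sketch of the shape, not the actual implementation; a real version would need to claim RUNNING_KEY atomically:

def start
  raise BackupRestoreV2::OperationRunningError if running?

  Discourse.redis.del(ABORT_KEY) # clear a stale abort signal
  Discourse.redis.setnx(RUNNING_KEY, Time.now.to_i)
  # plus threads that keep RUNNING_KEY alive and poll ABORT_KEY,
  # exiting the process when an abort is requested
end

def running?
  Discourse.redis.exists?(RUNNING_KEY)
end

def finish
  Discourse.redis.del(RUNNING_KEY)
end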
@ -5,8 +5,14 @@ module MessageBus::DiagnosticsHelper
    id = super(channel, data, opts)
    if @tracking && (@channel.nil? || @channel == channel)
      m = MessageBus::Message.new(-1, id, channel, data)
      m.user_ids = opts[:user_ids] if opts
      m.group_ids = opts[:group_ids] if opts

      if opts
        m.user_ids = opts[:user_ids]
        m.group_ids = opts[:group_ids]
        m.client_ids = opts[:client_ids]
        m.site_id = opts[:site_id]
      end

      @tracking << m
    end
    id
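This helper change is what lets specs such as default_logger_spec.rb above assert on `client_ids` of tracked messages. Typical usage, mirroring that spec:

messages = MessageBus.track_publish(BackupRestoreV2::LOGS_CHANNEL) do
  MessageBus.publish(BackupRestoreV2::LOGS_CHANNEL, "hi", client_ids: [42])
end
expect(messages.first.client_ids).to eq([42])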
@ -12,6 +12,7 @@ class FakeS3
      )
    end
    s3.stub_s3_helper
    s3.stub_file_downloader
    s3
  end

@ -55,6 +56,11 @@ class FakeS3
    end
  end

  def stub_file_downloader
    @file_downloader = FakeS3FileDownloader.new(@buckets)
    Aws::S3::FileDownloader.stubs(:new).returns(@file_downloader)
  end

  def operation_called?(name)
    @operations.any? do |operation|
      operation[:name] == name && (block_given? ? yield(operation) : true)

@ -78,7 +84,8 @@ class FakeS3

  def calculate_etag(context)
    # simple, reproducible ETag calculation
    Digest::MD5.hexdigest(context.params.to_json)
    content = context.params[:body].presence || context.params.to_json
    Digest::MD5.hexdigest(content)
  end

  def stub_methods

@ -105,7 +112,7 @@ class FakeS3
    log_operation(context)

    if object = find_object(context.params)
      { content_length: object[:size], body: "" }
      { content_length: object[:size], body: object[:body] }
    else
      { status_code: 404, headers: {}, body: "" }
    end

@ -175,6 +182,8 @@ class FakeS3Bucket
  end

  def put_object(obj)
    obj[:body] = File.binread(obj[:body].path) if obj[:body]&.is_a?(File)

    @objects[obj[:key]] = obj
  end

@ -186,3 +195,16 @@ class FakeS3Bucket
    @objects[key]
  end
end

class FakeS3FileDownloader
  def initialize(buckets)
    @buckets = buckets
  end

  def download(destination, options = {})
    bucket = options[:bucket]
    key = options[:key]
    object = @buckets[bucket].find_object(key)
    File.binwrite(destination, object[:body])
  end
end
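With these changes FakeS3 round-trips object bodies: `put_object` stores real file contents, `get_object` returns them, and the stubbed `Aws::S3::FileDownloader` writes the stored body to disk. That is what lets upload_backuper_spec.rb compare decompressed archive contents byte-for-byte with the original fixtures. Illustrative usage (`path` is a hypothetical local file):

fake_s3 = FakeS3.create
bucket = fake_s3.bucket(SiteSetting.s3_upload_bucket)
bucket.put_object(key: "original/1X/abc.png", body: File.open(path, "rb"))
bucket.find_object("original/1X/abc.png")[:body] # => the file's bytes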