",
+ ),
+ )
+ rescue URI::Error
+ # If the URL is weird, remove the iframe
+ i.remove
+ end
end
- end
end
def format_html
@@ -189,67 +220,93 @@ module Email
reset_tables
html_lang = SiteSetting.default_locale.sub("_", "-")
- style('html', nil, lang: html_lang, 'xml:lang' => html_lang)
- style('body', "line-height: 1.4; text-align:#{ Rtl.new(nil).enabled? ? 'right' : 'left' };")
- style('body', nil, dir: Rtl.new(nil).enabled? ? 'rtl' : 'ltr')
+ style("html", nil, :lang => html_lang, "xml:lang" => html_lang)
+ style("body", "line-height: 1.4; text-align:#{Rtl.new(nil).enabled? ? "right" : "left"};")
+ style("body", nil, dir: Rtl.new(nil).enabled? ? "rtl" : "ltr")
- style('.with-dir',
- "text-align:#{ Rtl.new(nil).enabled? ? 'right' : 'left' };",
- dir: Rtl.new(nil).enabled? ? 'rtl' : 'ltr'
+ style(
+ ".with-dir",
+ "text-align:#{Rtl.new(nil).enabled? ? "right" : "left"};",
+ dir: Rtl.new(nil).enabled? ? "rtl" : "ltr",
)
- style('blockquote > :first-child', 'margin-top: 0;')
- style('blockquote > :last-child', 'margin-bottom: 0;')
- style('blockquote > p', 'padding: 0;')
+ style("blockquote > :first-child", "margin-top: 0;")
+ style("blockquote > :last-child", "margin-bottom: 0;")
+ style("blockquote > p", "padding: 0;")
- style('.with-accent-colors', "background-color: #{SiteSetting.email_accent_bg_color}; color: #{SiteSetting.email_accent_fg_color};")
- style('h4', 'color: #222;')
- style('h3', 'margin: 30px 0 10px;')
- style('hr', 'background-color: #ddd; height: 1px; border: 1px;')
- style('a', "text-decoration: none; font-weight: bold; color: #{SiteSetting.email_link_color};")
- style('ul', 'margin: 0 0 0 10px; padding: 0 0 0 20px;')
- style('li', 'padding-bottom: 10px')
- style('div.summary-footer', 'color:#666; font-size:95%; text-align:center; padding-top:15px;')
- style('span.post-count', 'margin: 0 5px; color: #777;')
- style('pre', 'word-wrap: break-word; max-width: 694px;')
- style('code', 'background-color: #f9f9f9; padding: 2px 5px;')
- style('pre code', 'display: block; background-color: #f9f9f9; overflow: auto; padding: 5px;')
- style('pre.onebox code', 'white-space: normal;')
- style('pre code li', 'white-space: pre;')
- style('.featured-topic a', "text-decoration: none; font-weight: bold; color: #{SiteSetting.email_link_color}; line-height:1.5em;")
- style('.summary-email', "-moz-box-sizing:border-box;-ms-text-size-adjust:100%;-webkit-box-sizing:border-box;-webkit-text-size-adjust:100%;box-sizing:border-box;color:#0a0a0a;font-family:Arial,sans-serif;font-size:14px;font-weight:400;line-height:1.3;margin:0;min-width:100%;padding:0;width:100%")
+ style(
+ ".with-accent-colors",
+ "background-color: #{SiteSetting.email_accent_bg_color}; color: #{SiteSetting.email_accent_fg_color};",
+ )
+ style("h4", "color: #222;")
+ style("h3", "margin: 30px 0 10px;")
+ style("hr", "background-color: #ddd; height: 1px; border: 1px;")
+ style(
+ "a",
+ "text-decoration: none; font-weight: bold; color: #{SiteSetting.email_link_color};",
+ )
+ style("ul", "margin: 0 0 0 10px; padding: 0 0 0 20px;")
+ style("li", "padding-bottom: 10px")
+ style("div.summary-footer", "color:#666; font-size:95%; text-align:center; padding-top:15px;")
+ style("span.post-count", "margin: 0 5px; color: #777;")
+ style("pre", "word-wrap: break-word; max-width: 694px;")
+ style("code", "background-color: #f9f9f9; padding: 2px 5px;")
+ style("pre code", "display: block; background-color: #f9f9f9; overflow: auto; padding: 5px;")
+ style("pre.onebox code", "white-space: normal;")
+ style("pre code li", "white-space: pre;")
+ style(
+ ".featured-topic a",
+ "text-decoration: none; font-weight: bold; color: #{SiteSetting.email_link_color}; line-height:1.5em;",
+ )
+ style(
+ ".summary-email",
+ "-moz-box-sizing:border-box;-ms-text-size-adjust:100%;-webkit-box-sizing:border-box;-webkit-text-size-adjust:100%;box-sizing:border-box;color:#0a0a0a;font-family:Arial,sans-serif;font-size:14px;font-weight:400;line-height:1.3;margin:0;min-width:100%;padding:0;width:100%",
+ )
- style('.previous-discussion', 'font-size: 17px; color: #444; margin-bottom:10px;')
- style('.notification-date', "text-align:right;color:#999999;padding-right:5px;font-family:'lucida grande',tahoma,verdana,arial,sans-serif;font-size:11px")
- style('.username', "font-size:13px;font-family:'lucida grande',tahoma,verdana,arial,sans-serif;text-decoration:none;font-weight:bold")
- style('.username-link', "color:#{SiteSetting.email_link_color};")
- style('.username-title', "color:#777;margin-left:5px;")
- style('.user-title', "font-size:13px;font-family:'lucida grande',tahoma,verdana,arial,sans-serif;text-decoration:none;margin-left:5px;color: #999;")
- style('.post-wrapper', "margin-bottom:25px;")
- style('.user-avatar', 'vertical-align:top;width:55px;')
- style('.user-avatar img', nil, width: '45', height: '45')
- style('hr', 'background-color: #ddd; height: 1px; border: 1px;')
- style('.rtl', 'direction: rtl;')
- style('div.body', 'padding-top:5px;')
- style('.whisper div.body', 'font-style: italic; color: #9c9c9c;')
- style('.lightbox-wrapper .meta', 'display: none')
- style('div.undecorated-link-footer a', "font-weight: normal;")
- style('.mso-accent-link', "mso-border-alt: 6px solid #{SiteSetting.email_accent_bg_color}; background-color: #{SiteSetting.email_accent_bg_color};")
- style('.reply-above-line', "font-size: 10px;font-family:'lucida grande',tahoma,verdana,arial,sans-serif;color: #b5b5b5;padding: 5px 0px 20px;border-top: 1px dotted #ddd;")
+ style(".previous-discussion", "font-size: 17px; color: #444; margin-bottom:10px;")
+ style(
+ ".notification-date",
+ "text-align:right;color:#999999;padding-right:5px;font-family:'lucida grande',tahoma,verdana,arial,sans-serif;font-size:11px",
+ )
+ style(
+ ".username",
+ "font-size:13px;font-family:'lucida grande',tahoma,verdana,arial,sans-serif;text-decoration:none;font-weight:bold",
+ )
+ style(".username-link", "color:#{SiteSetting.email_link_color};")
+ style(".username-title", "color:#777;margin-left:5px;")
+ style(
+ ".user-title",
+ "font-size:13px;font-family:'lucida grande',tahoma,verdana,arial,sans-serif;text-decoration:none;margin-left:5px;color: #999;",
+ )
+ style(".post-wrapper", "margin-bottom:25px;")
+ style(".user-avatar", "vertical-align:top;width:55px;")
+ style(".user-avatar img", nil, width: "45", height: "45")
+ style("hr", "background-color: #ddd; height: 1px; border: 1px;")
+ style(".rtl", "direction: rtl;")
+ style("div.body", "padding-top:5px;")
+ style(".whisper div.body", "font-style: italic; color: #9c9c9c;")
+ style(".lightbox-wrapper .meta", "display: none")
+ style("div.undecorated-link-footer a", "font-weight: normal;")
+ style(
+ ".mso-accent-link",
+ "mso-border-alt: 6px solid #{SiteSetting.email_accent_bg_color}; background-color: #{SiteSetting.email_accent_bg_color};",
+ )
+ style(
+ ".reply-above-line",
+ "font-size: 10px;font-family:'lucida grande',tahoma,verdana,arial,sans-serif;color: #b5b5b5;padding: 5px 0px 20px;border-top: 1px dotted #ddd;",
+ )
onebox_styles
plugin_styles
dark_mode_styles
- style('.post-excerpt img', "max-width: 50%; max-height: #{MAX_IMAGE_DIMENSION}px;")
+ style(".post-excerpt img", "max-width: 50%; max-height: #{MAX_IMAGE_DIMENSION}px;")
format_custom
end
def format_custom
- custom_styles.each do |selector, value|
- style(selector, value)
- end
+ custom_styles.each { |selector, value| style(selector, value) }
end
# this method is reserved for styles specific to plugin
@@ -258,33 +315,46 @@ module Email
end
def inline_secure_images(attachments, attachments_index)
- stripped_media = @fragment.css('[data-stripped-secure-media], [data-stripped-secure-upload]')
+ stripped_media = @fragment.css("[data-stripped-secure-media], [data-stripped-secure-upload]")
upload_shas = {}
stripped_media.each do |div|
- url = div['data-stripped-secure-media'] || div['data-stripped-secure-upload']
+ url = div["data-stripped-secure-media"] || div["data-stripped-secure-upload"]
filename = File.basename(url)
filename_bare = filename.gsub(File.extname(filename), "")
- sha1 = filename_bare.partition('_').first
+ sha1 = filename_bare.partition("_").first
upload_shas[url] = sha1
end
uploads = Upload.select(:original_filename, :sha1).where(sha1: upload_shas.values)
stripped_media.each do |div|
- upload = uploads.find do |upl|
- upl.sha1 == (upload_shas[div['data-stripped-secure-media']] || upload_shas[div['data-stripped-secure-upload']])
- end
+ upload =
+ uploads.find do |upl|
+ upl.sha1 ==
+ (
+ upload_shas[div["data-stripped-secure-media"]] ||
+ upload_shas[div["data-stripped-secure-upload"]]
+ )
+ end
next if !upload
if attachments[attachments_index[upload.sha1]]
url = attachments[attachments_index[upload.sha1]].url
- onebox_type = div['data-onebox-type']
- style = if onebox_type
- onebox_style = onebox_type == "avatar-inline" ? ONEBOX_INLINE_AVATAR_STYLE : ONEBOX_IMAGE_THUMBNAIL_STYLE
- "#{onebox_style} #{ONEBOX_IMAGE_BASE_STYLE}"
- else
- calculate_width_and_height_style(div)
- end
+ onebox_type = div["data-onebox-type"]
+ style =
+ if onebox_type
+ onebox_style =
+ (
+ if onebox_type == "avatar-inline"
+ ONEBOX_INLINE_AVATAR_STYLE
+ else
+ ONEBOX_IMAGE_THUMBNAIL_STYLE
+ end
+ )
+ "#{onebox_style} #{ONEBOX_IMAGE_BASE_STYLE}"
+ else
+ calculate_width_and_height_style(div)
+ end
div.add_next_sibling(<<~HTML)
@@ -309,39 +379,45 @@ module Email
end
def strip_avatars_and_emojis
- @fragment.search('img').each do |img|
- next unless img['src']
+ @fragment
+ .search("img")
+ .each do |img|
+ next unless img["src"]
- if img['src'][/_avatar/]
- img.parent['style'] = "vertical-align: top;" if img.parent&.name == 'td'
- img.remove
- end
+ if img["src"][/_avatar/]
+ img.parent["style"] = "vertical-align: top;" if img.parent&.name == "td"
+ img.remove
+ end
- if img['title'] && img['src'][/\/_?emoji\//]
- img.add_previous_sibling(img['title'] || "emoji")
- img.remove
+ if img["title"] && img["src"][%r{/_?emoji/}]
+ img.add_previous_sibling(img["title"] || "emoji")
+ img.remove
+ end
end
- end
end
def decorate_hashtags
- @fragment.search(".hashtag-cooked").each do |hashtag|
- hashtag.children.each(&:remove)
- hashtag.add_child(<<~HTML)
+ @fragment
+ .search(".hashtag-cooked")
+ .each do |hashtag|
+ hashtag.children.each(&:remove)
+ hashtag.add_child(<<~HTML)
##{hashtag["data-slug"]}
HTML
- end
+ end
end
def make_all_links_absolute
site_uri = URI(Discourse.base_url)
- @fragment.css("a").each do |link|
- begin
- link["href"] = "#{site_uri}#{link['href']}" unless URI(link["href"].to_s).host.present?
- rescue URI::Error
- # leave it
+ @fragment
+ .css("a")
+ .each do |link|
+ begin
+ link["href"] = "#{site_uri}#{link["href"]}" unless URI(link["href"].to_s).host.present?
+ rescue URI::Error
+ # leave it
+ end
end
- end
end
private
@@ -350,8 +426,16 @@ module Email
# When we ship the email template and its styles we strip all css classes so to give our
# dark mode styles we are including in the template a selector we add a data-attr of 'dm=value' to
# the appropriate place
- style(".digest-header, .digest-topic, .digest-topic-title-wrapper, .digest-topic-stats, .popular-post-excerpt", nil, dm: "header")
- style(".digest-content, .header-popular-posts, .spacer, .popular-post-spacer, .popular-post-meta, .digest-new-header, .digest-new-topic, .body", nil, dm: "body")
+ style(
+ ".digest-header, .digest-topic, .digest-topic-title-wrapper, .digest-topic-stats, .popular-post-excerpt",
+ nil,
+ dm: "header",
+ )
+ style(
+ ".digest-content, .header-popular-posts, .spacer, .popular-post-spacer, .popular-post-meta, .digest-new-header, .digest-new-topic, .body",
+ nil,
+ dm: "body",
+ )
style(".with-accent-colors, .digest-content-header", nil, dm: "body_primary")
style(".digest-topic-body", nil, dm: "topic-body")
style(".summary-footer", nil, dm: "text-color")
@@ -363,18 +447,19 @@ module Email
host = forum_uri.host
scheme = forum_uri.scheme
- @fragment.css('[href]').each do |element|
- href = element['href']
- if href.start_with?("\/\/#{host}")
- element['href'] = "#{scheme}:#{href}"
+ @fragment
+ .css("[href]")
+ .each do |element|
+ href = element["href"]
+ element["href"] = "#{scheme}:#{href}" if href.start_with?("\/\/#{host}")
end
- end
end
def calculate_width_and_height_style(div)
- width = div['data-width']
- height = div['data-height']
- if width.present? && height.present? && height.to_i < MAX_IMAGE_DIMENSION && width.to_i < MAX_IMAGE_DIMENSION
+ width = div["data-width"]
+ height = div["data-height"]
+ if width.present? && height.present? && height.to_i < MAX_IMAGE_DIMENSION &&
+ width.to_i < MAX_IMAGE_DIMENSION
"width: #{width}px; height: #{height}px;"
else
"max-width: 50%; max-height: #{MAX_IMAGE_DIMENSION}px;"
@@ -386,59 +471,68 @@ module Email
# notification template but that may not catch everything
PrettyText.strip_secure_uploads(@fragment)
- style('div.secure-upload-notice', 'border: 5px solid #e9e9e9; padding: 5px; display: inline-block;')
- style('div.secure-upload-notice a', "color: #{SiteSetting.email_link_color}")
+ style(
+ "div.secure-upload-notice",
+ "border: 5px solid #e9e9e9; padding: 5px; display: inline-block;",
+ )
+ style("div.secure-upload-notice a", "color: #{SiteSetting.email_link_color}")
end
def correct_first_body_margin
- @fragment.css('div.body p').each do |element|
- element['style'] = "margin-top:0; border: 0;"
- end
+ @fragment.css("div.body p").each { |element| element["style"] = "margin-top:0; border: 0;" }
end
def correct_footer_style
- @fragment.css('.footer').each do |element|
- element['style'] = "color:#666;"
- element.css('a').each do |inner|
- inner['style'] = "color:#666;"
+ @fragment
+ .css(".footer")
+ .each do |element|
+ element["style"] = "color:#666;"
+ element.css("a").each { |inner| inner["style"] = "color:#666;" }
end
- end
end
def correct_footer_style_highlight_first
footernum = 0
- @fragment.css('.footer.highlight').each do |element|
- linknum = 0
- element.css('a').each do |inner|
- # we want the first footer link to be specially highlighted as IMPORTANT
- if footernum == (0) && linknum == (0)
- bg_color = SiteSetting.email_accent_bg_color
- inner['style'] = "background-color: #{bg_color}; color: #{SiteSetting.email_accent_fg_color}; border-top: 4px solid #{bg_color}; border-right: 6px solid #{bg_color}; border-bottom: 4px solid #{bg_color}; border-left: 6px solid #{bg_color}; display: inline-block; font-weight: bold;"
- end
+ @fragment
+ .css(".footer.highlight")
+ .each do |element|
+ linknum = 0
+ element
+ .css("a")
+ .each do |inner|
+ # we want the first footer link to be specially highlighted as IMPORTANT
+ if footernum == (0) && linknum == (0)
+ bg_color = SiteSetting.email_accent_bg_color
+ inner[
+ "style"
+ ] = "background-color: #{bg_color}; color: #{SiteSetting.email_accent_fg_color}; border-top: 4px solid #{bg_color}; border-right: 6px solid #{bg_color}; border-bottom: 4px solid #{bg_color}; border-left: 6px solid #{bg_color}; display: inline-block; font-weight: bold;"
+ end
+ return
+ end
return
end
- return
- end
end
def strip_classes_and_ids
- @fragment.css('*').each do |element|
- element.delete('class')
- element.delete('id')
- end
+ @fragment
+ .css("*")
+ .each do |element|
+ element.delete("class")
+ element.delete("id")
+ end
end
def reset_tables
- style('table', nil, cellspacing: '0', cellpadding: '0', border: '0')
+ style("table", nil, cellspacing: "0", cellpadding: "0", border: "0")
end
def style(selector, style, attribs = {})
- @fragment.css(selector).each do |element|
- add_styles(element, style) if style
- attribs.each do |k, v|
- element[k] = v
+ @fragment
+ .css(selector)
+ .each do |element|
+ add_styles(element, style) if style
+ attribs.each { |k, v| element[k] = v }
end
- end
end
end
end
diff --git a/lib/email/validator.rb b/lib/email/validator.rb
index 2795055a93..764bb7e13c 100644
--- a/lib/email/validator.rb
+++ b/lib/email/validator.rb
@@ -10,7 +10,7 @@ module Email
end
def self.ensure_valid_address_lists!(mail)
- [:to, :cc, :bcc].each do |field|
+ %i[to cc bcc].each do |field|
addresses = mail[field]
if addresses&.errors.present?
@@ -21,7 +21,8 @@ module Email
def self.ensure_valid_date!(mail)
if mail.date.nil?
- raise Email::Receiver::InvalidPost, I18n.t("system_messages.email_reject_invalid_post_specified.date_invalid")
+ raise Email::Receiver::InvalidPost,
+ I18n.t("system_messages.email_reject_invalid_post_specified.date_invalid")
end
end
end
diff --git a/lib/email_backup_token.rb b/lib/email_backup_token.rb
index 098f7c7e07..0aef08ca54 100644
--- a/lib/email_backup_token.rb
+++ b/lib/email_backup_token.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
class EmailBackupToken
-
def self.key(user_id)
"email-backup-token:#{user_id}"
end
diff --git a/lib/email_controller_helper/base_email_unsubscriber.rb b/lib/email_controller_helper/base_email_unsubscriber.rb
index b04560040c..46267cb36e 100644
--- a/lib/email_controller_helper/base_email_unsubscriber.rb
+++ b/lib/email_controller_helper/base_email_unsubscriber.rb
@@ -20,7 +20,7 @@ module EmailControllerHelper
controller.instance_variable_set(
:@unsubscribed_from_all,
- key_owner.user_option.unsubscribed_from_all?
+ key_owner.user_option.unsubscribed_from_all?,
)
end
@@ -38,10 +38,12 @@ module EmailControllerHelper
end
if params[:unsubscribe_all]
- key_owner.user_option.update_columns(email_digests: false,
- email_level: UserOption.email_level_types[:never],
- email_messages_level: UserOption.email_level_types[:never],
- mailing_list_mode: false)
+ key_owner.user_option.update_columns(
+ email_digests: false,
+ email_level: UserOption.email_level_types[:never],
+ email_messages_level: UserOption.email_level_types[:never],
+ mailing_list_mode: false,
+ )
updated = true
end
diff --git a/lib/email_controller_helper/digest_email_unsubscriber.rb b/lib/email_controller_helper/digest_email_unsubscriber.rb
index 7291d01258..b96e77402c 100644
--- a/lib/email_controller_helper/digest_email_unsubscriber.rb
+++ b/lib/email_controller_helper/digest_email_unsubscriber.rb
@@ -12,22 +12,34 @@ module EmailControllerHelper
never = frequencies.delete_at(0)
allowed_frequencies = %w[never weekly every_month every_six_months]
- result = frequencies.reduce(frequencies: [], current: nil, selected: nil, take_next: false) do |memo, v|
- memo[:current] = v[:name] if v[:value] == frequency_in_minutes && email_digests
- next(memo) unless allowed_frequencies.include?(v[:name])
+ result =
+ frequencies.reduce(
+ frequencies: [],
+ current: nil,
+ selected: nil,
+ take_next: false,
+ ) do |memo, v|
+ memo[:current] = v[:name] if v[:value] == frequency_in_minutes && email_digests
+ next(memo) unless allowed_frequencies.include?(v[:name])
- memo.tap do |m|
- m[:selected] = v[:value] if m[:take_next] && email_digests
- m[:frequencies] << [I18n.t("unsubscribe.digest_frequency.#{v[:name]}"), v[:value]]
- m[:take_next] = !m[:take_next] && m[:current]
+ memo.tap do |m|
+ m[:selected] = v[:value] if m[:take_next] && email_digests
+ m[:frequencies] << [I18n.t("unsubscribe.digest_frequency.#{v[:name]}"), v[:value]]
+ m[:take_next] = !m[:take_next] && m[:current]
+ end
end
- end
- digest_frequencies = result.slice(:frequencies, :current, :selected).tap do |r|
- r[:frequencies] << [I18n.t("unsubscribe.digest_frequency.#{never[:name]}"), never[:value]]
- r[:selected] ||= never[:value]
- r[:current] ||= never[:name]
- end
+ digest_frequencies =
+ result
+ .slice(:frequencies, :current, :selected)
+ .tap do |r|
+ r[:frequencies] << [
+ I18n.t("unsubscribe.digest_frequency.#{never[:name]}"),
+ never[:value],
+ ]
+ r[:selected] ||= never[:value]
+ r[:current] ||= never[:name]
+ end
controller.instance_variable_set(:@digest_frequencies, digest_frequencies)
end
@@ -40,7 +52,7 @@ module EmailControllerHelper
unsubscribe_key.user.user_option.update_columns(
digest_after_minutes: digest_frequency,
- email_digests: digest_frequency.positive?
+ email_digests: digest_frequency.positive?,
)
updated = true
end
diff --git a/lib/email_controller_helper/topic_email_unsubscriber.rb b/lib/email_controller_helper/topic_email_unsubscriber.rb
index 6265853f54..eda37b7d66 100644
--- a/lib/email_controller_helper/topic_email_unsubscriber.rb
+++ b/lib/email_controller_helper/topic_email_unsubscriber.rb
@@ -11,16 +11,25 @@ module EmailControllerHelper
controller.instance_variable_set(:@topic, topic)
controller.instance_variable_set(
:@watching_topic,
- TopicUser.exists?(user: key_owner, notification_level: watching, topic_id: topic.id)
+ TopicUser.exists?(user: key_owner, notification_level: watching, topic_id: topic.id),
)
return if topic.category_id.blank?
- return if !CategoryUser.exists?(user: key_owner, notification_level: CategoryUser.watching_levels, category_id: topic.category_id)
+ if !CategoryUser.exists?(
+ user: key_owner,
+ notification_level: CategoryUser.watching_levels,
+ category_id: topic.category_id,
+ )
+ return
+ end
controller.instance_variable_set(
:@watched_count,
- TopicUser.joins(:topic)
- .where(user: key_owner, notification_level: watching).where(topics: { category_id: topic.category_id }).count
+ TopicUser
+ .joins(:topic)
+ .where(user: key_owner, notification_level: watching)
+ .where(topics: { category_id: topic.category_id })
+ .count,
)
end
@@ -31,27 +40,33 @@ module EmailControllerHelper
return updated if topic.nil?
if params[:unwatch_topic]
- TopicUser.where(topic_id: topic.id, user_id: key_owner.id)
- .update_all(notification_level: TopicUser.notification_levels[:tracking])
+ TopicUser.where(topic_id: topic.id, user_id: key_owner.id).update_all(
+ notification_level: TopicUser.notification_levels[:tracking],
+ )
updated = true
end
if params[:unwatch_category] && topic.category_id
- TopicUser.joins(:topic)
+ TopicUser
+ .joins(:topic)
.where(user: key_owner, notification_level: TopicUser.notification_levels[:watching])
.where(topics: { category_id: topic.category_id })
.update_all(notification_level: TopicUser.notification_levels[:tracking])
- CategoryUser
- .where(user_id: key_owner.id, category_id: topic.category_id, notification_level: CategoryUser.watching_levels)
- .destroy_all
+ CategoryUser.where(
+ user_id: key_owner.id,
+ category_id: topic.category_id,
+ notification_level: CategoryUser.watching_levels,
+ ).destroy_all
updated = true
end
if params[:mute_topic]
- TopicUser.where(topic_id: topic.id, user_id: key_owner.id).update_all(notification_level: TopicUser.notification_levels[:muted])
+ TopicUser.where(topic_id: topic.id, user_id: key_owner.id).update_all(
+ notification_level: TopicUser.notification_levels[:muted],
+ )
updated = true
end
diff --git a/lib/email_cook.rb b/lib/email_cook.rb
index 89b59891f1..2c76e1f2ff 100644
--- a/lib/email_cook.rb
+++ b/lib/email_cook.rb
@@ -2,9 +2,9 @@
# A very simple formatter for imported emails
class EmailCook
-
def self.raw_regexp
- @raw_regexp ||= /^\[plaintext\]$\n(.*)\n^\[\/plaintext\]$(?:\s^\[attachments\]$\n(.*)\n^\[\/attachments\]$)?(?:\s^\[elided\]$\n(.*)\n^\[\/elided\]$)?/m
+ @raw_regexp ||=
+ %r{^\[plaintext\]$\n(.*)\n^\[/plaintext\]$(?:\s^\[attachments\]$\n(.*)\n^\[/attachments\]$)?(?:\s^\[elided\]$\n(.*)\n^\[/elided\]$)?}m
end
def initialize(raw)
@@ -22,7 +22,7 @@ class EmailCook
def link_string!(line, unescaped_line)
unescaped_line = unescaped_line.strip
line.gsub!(/\S+/) do |str|
- if str.match?(/^(https?:\/\/)[\S]+$/i)
+ if str.match?(%r{^(https?://)[\S]+$}i)
begin
url = URI.parse(str).to_s
if unescaped_line == url
@@ -52,7 +52,7 @@ class EmailCook
if line =~ /^\s*>/
in_quote = true
- line.sub!(/^[\s>]*/, '')
+ line.sub!(/^[\s>]*/, "")
unescaped_line = line
line = CGI.escapeHTML(line)
@@ -64,7 +64,6 @@ class EmailCook
quote_buffer = ""
in_quote = false
else
-
sz = line.size
unescaped_line = line
@@ -72,9 +71,7 @@ class EmailCook
link_string!(line, unescaped_line)
if sz < 60
- if in_text && line == "\n"
- result << " "
- end
+ result << " " if in_text && line == "\n"
result << line
result << " "
@@ -86,11 +83,9 @@ class EmailCook
end
end
- if in_quote && quote_buffer.present?
- add_quote(result, quote_buffer)
- end
+ add_quote(result, quote_buffer) if in_quote && quote_buffer.present?
- result.gsub!(/( \n*){3,10}/, '
')
+ result.gsub!(/( \n*){3,10}/, "
")
result
end
@@ -98,10 +93,9 @@ class EmailCook
# fallback to PrettyText if we failed to detect a body
return PrettyText.cook(@raw, opts) if @body.nil?
- result = htmlify(@body)
+ result = htmlify(@body)
result << "\n " << @attachment_html if @attachment_html.present?
result << "\n
" << Email::Receiver.elided_html(htmlify(@elided)) if @elided.present?
result
end
-
end
diff --git a/lib/email_updater.rb b/lib/email_updater.rb
index 8f5fb292e2..c25c2382e8 100644
--- a/lib/email_updater.rb
+++ b/lib/email_updater.rb
@@ -26,8 +26,8 @@ class EmailUpdater
if SiteSetting.hide_email_address_taken
Jobs.enqueue(:critical_user_email, type: "account_exists", user_id: existing_user.id)
else
- error_message = +'change_email.error'
- error_message << '_staged' if existing_user.staged?
+ error_message = +"change_email.error"
+ error_message << "_staged" if existing_user.staged?
errors.add(:base, I18n.t(error_message))
end
end
@@ -57,19 +57,23 @@ class EmailUpdater
@change_req.new_email = email
end
- if @change_req.change_state.blank? || @change_req.change_state == EmailChangeRequest.states[:complete]
- @change_req.change_state = if SiteSetting.require_change_email_confirmation || @user.staff?
- EmailChangeRequest.states[:authorizing_old]
- else
- EmailChangeRequest.states[:authorizing_new]
- end
+ if @change_req.change_state.blank? ||
+ @change_req.change_state == EmailChangeRequest.states[:complete]
+ @change_req.change_state =
+ if SiteSetting.require_change_email_confirmation || @user.staff?
+ EmailChangeRequest.states[:authorizing_old]
+ else
+ EmailChangeRequest.states[:authorizing_new]
+ end
end
if @change_req.change_state == EmailChangeRequest.states[:authorizing_old]
- @change_req.old_email_token = @user.email_tokens.create!(email: @user.email, scope: EmailToken.scopes[:email_update])
+ @change_req.old_email_token =
+ @user.email_tokens.create!(email: @user.email, scope: EmailToken.scopes[:email_update])
send_email(add ? "confirm_old_email_add" : "confirm_old_email", @change_req.old_email_token)
elsif @change_req.change_state == EmailChangeRequest.states[:authorizing_new]
- @change_req.new_email_token = @user.email_tokens.create!(email: email, scope: EmailToken.scopes[:email_update])
+ @change_req.new_email_token =
+ @user.email_tokens.create!(email: email, scope: EmailToken.scopes[:email_update])
send_email("confirm_new_email", @change_req.new_email_token)
end
@@ -83,7 +87,7 @@ class EmailUpdater
User.transaction do
email_token = EmailToken.confirmable(token, scope: EmailToken.scopes[:email_update])
if email_token.blank?
- errors.add(:base, I18n.t('change_email.already_done'))
+ errors.add(:base, I18n.t("change_email.already_done"))
confirm_result = :error
next
end
@@ -91,15 +95,24 @@ class EmailUpdater
email_token.update!(confirmed: true)
@user = email_token.user
- @change_req = @user.email_change_requests
- .where('old_email_token_id = :token_id OR new_email_token_id = :token_id', token_id: email_token.id)
- .first
+ @change_req =
+ @user
+ .email_change_requests
+ .where(
+ "old_email_token_id = :token_id OR new_email_token_id = :token_id",
+ token_id: email_token.id,
+ )
+ .first
case @change_req.try(:change_state)
when EmailChangeRequest.states[:authorizing_old]
@change_req.update!(
change_state: EmailChangeRequest.states[:authorizing_new],
- new_email_token: @user.email_tokens.create!(email: @change_req.new_email, scope: EmailToken.scopes[:email_update])
+ new_email_token:
+ @user.email_tokens.create!(
+ email: @change_req.new_email,
+ scope: EmailToken.scopes[:email_update],
+ ),
)
send_email("confirm_new_email", @change_req.new_email_token)
confirm_result = :authorizing_new
diff --git a/lib/ember_cli.rb b/lib/ember_cli.rb
index fd3892918b..00c65610ba 100644
--- a/lib/ember_cli.rb
+++ b/lib/ember_cli.rb
@@ -2,37 +2,47 @@
module EmberCli
def self.assets
- @assets ||= begin
- assets = %w(
- discourse.js
- admin.js
- wizard.js
- ember_jquery.js
- markdown-it-bundle.js
- start-discourse.js
- vendor.js
- )
- assets += Dir.glob("app/assets/javascripts/discourse/scripts/*.js").map { |f| File.basename(f) }
+ @assets ||=
+ begin
+ assets = %w[
+ discourse.js
+ admin.js
+ wizard.js
+ ember_jquery.js
+ markdown-it-bundle.js
+ start-discourse.js
+ vendor.js
+ ]
+ assets +=
+ Dir.glob("app/assets/javascripts/discourse/scripts/*.js").map { |f| File.basename(f) }
- Discourse.find_plugin_js_assets(include_disabled: true).each do |file|
- next if file.ends_with?("_extra") # these are still handled by sprockets
- assets << "#{file}.js"
+ Discourse
+ .find_plugin_js_assets(include_disabled: true)
+ .each do |file|
+ next if file.ends_with?("_extra") # these are still handled by sprockets
+ assets << "#{file}.js"
+ end
+
+ assets
end
-
- assets
- end
end
def self.script_chunks
- return @@chunk_infos if defined? @@chunk_infos
+ return @@chunk_infos if defined?(@@chunk_infos)
- raw_chunk_infos = JSON.parse(File.read("#{Rails.configuration.root}/app/assets/javascripts/discourse/dist/chunks.json"))
+ raw_chunk_infos =
+ JSON.parse(
+ File.read("#{Rails.configuration.root}/app/assets/javascripts/discourse/dist/chunks.json"),
+ )
- chunk_infos = raw_chunk_infos["scripts"].map do |info|
- logical_name = info["afterFile"][/\Aassets\/(.*)\.js\z/, 1]
- chunks = info["scriptChunks"].map { |filename| filename[/\Aassets\/(.*)\.js\z/, 1] }
- [logical_name, chunks]
- end.to_h
+ chunk_infos =
+ raw_chunk_infos["scripts"]
+ .map do |info|
+ logical_name = info["afterFile"][%r{\Aassets/(.*)\.js\z}, 1]
+ chunks = info["scriptChunks"].map { |filename| filename[%r{\Aassets/(.*)\.js\z}, 1] }
+ [logical_name, chunks]
+ end
+ .to_h
@@chunk_infos = chunk_infos if Rails.env.production?
chunk_infos
@@ -45,9 +55,11 @@ module EmberCli
end
def self.ember_version
- @version ||= begin
- ember_source_package_raw = File.read("#{Rails.root}/app/assets/javascripts/node_modules/ember-source/package.json")
- JSON.parse(ember_source_package_raw)["version"]
- end
+ @version ||=
+ begin
+ ember_source_package_raw =
+ File.read("#{Rails.root}/app/assets/javascripts/node_modules/ember-source/package.json")
+ JSON.parse(ember_source_package_raw)["version"]
+ end
end
end
diff --git a/lib/encodings.rb b/lib/encodings.rb
index 8bf0c7c72b..b8e68f24e5 100644
--- a/lib/encodings.rb
+++ b/lib/encodings.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-require 'rchardet'
+require "rchardet"
module Encodings
def self.to_utf8(string)
result = CharDet.detect(string)
- encoded_string = try_utf8(string, result['encoding']) if result && result['encoding']
+ encoded_string = try_utf8(string, result["encoding"]) if result && result["encoding"]
encoded_string = force_utf8(string) if encoded_string.nil?
encoded_string
end
@@ -15,21 +15,18 @@ module Encodings
encoded = string.encode(Encoding::UTF_8, source_encoding)
encoded&.valid_encoding? ? delete_bom!(encoded) : nil
rescue Encoding::InvalidByteSequenceError,
- Encoding::UndefinedConversionError,
- Encoding::ConverterNotFoundError
+ Encoding::UndefinedConversionError,
+ Encoding::ConverterNotFoundError
nil
end
def self.force_utf8(string)
- encoded_string = string.encode(Encoding::UTF_8,
- undef: :replace,
- invalid: :replace,
- replace: '')
+ encoded_string = string.encode(Encoding::UTF_8, undef: :replace, invalid: :replace, replace: "")
delete_bom!(encoded_string)
end
def self.delete_bom!(string)
- string.sub!(/\A\xEF\xBB\xBF/, '') unless string.blank?
+ string.sub!(/\A\xEF\xBB\xBF/, "") unless string.blank?
string
end
end
diff --git a/lib/enum.rb b/lib/enum.rb
index d440121900..9c87135c72 100644
--- a/lib/enum.rb
+++ b/lib/enum.rb
@@ -43,15 +43,11 @@ class Enum < Hash
# Public: Create a subset of enum, only include specified keys.
def only(*keys)
- dup.tap do |d|
- d.keep_if { |k| keys.include?(k) }
- end
+ dup.tap { |d| d.keep_if { |k| keys.include?(k) } }
end
# Public: Create a subset of enum, preserve all items but specified ones.
def except(*keys)
- dup.tap do |d|
- d.delete_if { |k| keys.include?(k) }
- end
+ dup.tap { |d| d.delete_if { |k| keys.include?(k) } }
end
end
diff --git a/lib/excerpt_parser.rb b/lib/excerpt_parser.rb
index 2a4fbc8e3b..f01499e934 100644
--- a/lib/excerpt_parser.rb
+++ b/lib/excerpt_parser.rb
@@ -28,15 +28,14 @@ class ExcerptParser < Nokogiri::XML::SAX::Document
end
def self.get_excerpt(html, length, options)
- html ||= ''
- length = html.length if html.include?('excerpt') && CUSTOM_EXCERPT_REGEX === html
+ html ||= ""
+ length = html.length if html.include?("excerpt") && CUSTOM_EXCERPT_REGEX === html
me = self.new(length, options)
parser = Nokogiri::HTML::SAX::Parser.new(me)
- catch(:done) do
- parser.parse(html)
- end
+ catch(:done) { parser.parse(html) }
excerpt = me.excerpt.strip
- excerpt = excerpt.gsub(/\s*\n+\s*/, "\n\n") if options[:keep_onebox_source] || options[:keep_onebox_body]
+ excerpt = excerpt.gsub(/\s*\n+\s*/, "\n\n") if options[:keep_onebox_source] ||
+ options[:keep_onebox_body]
excerpt = CGI.unescapeHTML(excerpt) if options[:text_entities] == true
excerpt
end
@@ -53,8 +52,12 @@ class ExcerptParser < Nokogiri::XML::SAX::Document
end
def include_tag(name, attributes)
- characters("<#{name} #{attributes.map { |k, v| "#{k}=\"#{escape_attribute(v)}\"" }.join(' ')}>",
- truncate: false, count_it: false, encode: false)
+ characters(
+ "<#{name} #{attributes.map { |k, v| "#{k}=\"#{escape_attribute(v)}\"" }.join(" ")}>",
+ truncate: false,
+ count_it: false,
+ encode: false,
+ )
end
def start_element(name, attributes = [])
@@ -62,7 +65,7 @@ class ExcerptParser < Nokogiri::XML::SAX::Document
when "img"
attributes = Hash[*attributes.flatten]
- if attributes["class"]&.include?('emoji')
+ if attributes["class"]&.include?("emoji")
if @remap_emoji
title = (attributes["alt"] || "").gsub(":", "")
title = Emoji.lookup_unicode(title) || attributes["alt"]
@@ -83,68 +86,53 @@ class ExcerptParser < Nokogiri::XML::SAX::Document
elsif !attributes["title"].blank?
characters("[#{attributes["title"]}]")
else
- characters("[#{I18n.t 'excerpt_image'}]")
+ characters("[#{I18n.t "excerpt_image"}]")
end
- characters("(#{attributes['src']})") if @markdown_images
+ characters("(#{attributes["src"]})") if @markdown_images
end
-
when "a"
unless @strip_links
include_tag(name, attributes)
@in_a = true
end
-
when "aside"
attributes = Hash[*attributes.flatten]
- unless (@keep_onebox_source || @keep_onebox_body) && attributes['class']&.include?('onebox')
+ unless (@keep_onebox_source || @keep_onebox_body) && attributes["class"]&.include?("onebox")
@in_quote = true
end
- if attributes['class']&.include?('quote')
- if @keep_quotes || (@keep_onebox_body && attributes['data-topic'].present?)
+ if attributes["class"]&.include?("quote")
+ if @keep_quotes || (@keep_onebox_body && attributes["data-topic"].present?)
@in_quote = false
end
end
-
- when 'article'
- if attributes.include?(['class', 'onebox-body'])
- @in_quote = !@keep_onebox_body
- end
-
- when 'header'
- if attributes.include?(['class', 'source'])
- @in_quote = !@keep_onebox_source
- end
-
+ when "article"
+ @in_quote = !@keep_onebox_body if attributes.include?(%w[class onebox-body])
+ when "header"
+ @in_quote = !@keep_onebox_source if attributes.include?(%w[class source])
when "div", "span"
- if attributes.include?(["class", "excerpt"])
+ if attributes.include?(%w[class excerpt])
@excerpt = +""
@current_length = 0
@start_excerpt = true
end
-
when "details"
@detail_contents = +"" if @in_details_depth == 0
@in_details_depth += 1
-
when "summary"
if @in_details_depth == 1 && !@in_summary
@summary_contents = +""
@in_summary = true
end
-
when "svg"
attributes = Hash[*attributes.flatten]
if attributes["class"]&.include?("d-icon") && @keep_svg
include_tag(name, attributes)
@in_svg = true
end
-
when "use"
- if @in_svg && @keep_svg
- include_tag(name, attributes)
- end
+ include_tag(name, attributes) if @in_svg && @keep_svg
end
end
@@ -170,20 +158,22 @@ class ExcerptParser < Nokogiri::XML::SAX::Document
@detail_contents = clean(@detail_contents)
if @current_length + @summary_contents.length >= @length
- characters(@summary_contents,
- encode: false,
- before_string: "",
- after_string: "")
+ characters(
+ @summary_contents,
+ encode: false,
+ before_string: "",
+ after_string: "",
+ )
else
- characters(@summary_contents,
- truncate: false,
- encode: false,
- before_string: "",
- after_string: "")
+ characters(
+ @summary_contents,
+ truncate: false,
+ encode: false,
+ before_string: "",
+ after_string: "",
+ )
- characters(@detail_contents,
- encode: false,
- after_string: "")
+ characters(@detail_contents, encode: false, after_string: "")
end
end
when "summary"
@@ -202,7 +192,14 @@ class ExcerptParser < Nokogiri::XML::SAX::Document
ERB::Util.html_escape(str.strip)
end
- def characters(string, truncate: true, count_it: true, encode: true, before_string: nil, after_string: nil)
+ def characters(
+ string,
+ truncate: true,
+ count_it: true,
+ encode: true,
+ before_string: nil,
+ after_string: nil
+ )
return if @in_quote
# we call length on this so might as well ensure we have a string
diff --git a/lib/external_upload_helpers.rb b/lib/external_upload_helpers.rb
index 5b0e43f5ab..3ac4cea5bc 100644
--- a/lib/external_upload_helpers.rb
+++ b/lib/external_upload_helpers.rb
@@ -5,35 +5,41 @@
module ExternalUploadHelpers
extend ActiveSupport::Concern
- class ExternalUploadValidationError < StandardError; end
+ class ExternalUploadValidationError < StandardError
+ end
PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE = 10
CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10
COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10
included do
- before_action :external_store_check, only: [
- :generate_presigned_put,
- :complete_external_upload,
- :create_multipart,
- :batch_presign_multipart_parts,
- :abort_multipart,
- :complete_multipart
- ]
- before_action :direct_s3_uploads_check, only: [
- :generate_presigned_put,
- :complete_external_upload,
- :create_multipart,
- :batch_presign_multipart_parts,
- :abort_multipart,
- :complete_multipart
- ]
- before_action :can_upload_external?, only: [:create_multipart, :generate_presigned_put]
+ before_action :external_store_check,
+ only: %i[
+ generate_presigned_put
+ complete_external_upload
+ create_multipart
+ batch_presign_multipart_parts
+ abort_multipart
+ complete_multipart
+ ]
+ before_action :direct_s3_uploads_check,
+ only: %i[
+ generate_presigned_put
+ complete_external_upload
+ create_multipart
+ batch_presign_multipart_parts
+ abort_multipart
+ complete_multipart
+ ]
+ before_action :can_upload_external?, only: %i[create_multipart generate_presigned_put]
end
def generate_presigned_put
RateLimiter.new(
- current_user, "generate-presigned-put-upload-stub", ExternalUploadHelpers::PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE, 1.minute
+ current_user,
+ "generate-presigned-put-upload-stub",
+ ExternalUploadHelpers::PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE,
+ 1.minute,
).performed!
file_name = params.require(:file_name)
@@ -44,28 +50,28 @@ module ExternalUploadHelpers
validate_before_create_direct_upload(
file_name: file_name,
file_size: file_size,
- upload_type: type
+ upload_type: type,
)
rescue ExternalUploadValidationError => err
return render_json_error(err.message, status: 422)
end
- external_upload_data = ExternalUploadManager.create_direct_upload(
- current_user: current_user,
- file_name: file_name,
- file_size: file_size,
- upload_type: type,
- metadata: parse_allowed_metadata(params[:metadata])
- )
+ external_upload_data =
+ ExternalUploadManager.create_direct_upload(
+ current_user: current_user,
+ file_name: file_name,
+ file_size: file_size,
+ upload_type: type,
+ metadata: parse_allowed_metadata(params[:metadata]),
+ )
render json: external_upload_data
end
def complete_external_upload
unique_identifier = params.require(:unique_identifier)
- external_upload_stub = ExternalUploadStub.find_by(
- unique_identifier: unique_identifier, created_by: current_user
- )
+ external_upload_stub =
+ ExternalUploadStub.find_by(unique_identifier: unique_identifier, created_by: current_user)
return render_404 if external_upload_stub.blank?
complete_external_upload_via_manager(external_upload_stub)
@@ -73,7 +79,10 @@ module ExternalUploadHelpers
def create_multipart
RateLimiter.new(
- current_user, "create-multipart-upload", ExternalUploadHelpers::CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute
+ current_user,
+ "create-multipart-upload",
+ ExternalUploadHelpers::CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE,
+ 1.minute,
).performed!
file_name = params.require(:file_name)
@@ -84,22 +93,23 @@ module ExternalUploadHelpers
validate_before_create_multipart(
file_name: file_name,
file_size: file_size,
- upload_type: upload_type
+ upload_type: upload_type,
)
rescue ExternalUploadValidationError => err
return render_json_error(err.message, status: 422)
end
begin
- external_upload_data = create_direct_multipart_upload do
- ExternalUploadManager.create_direct_multipart_upload(
- current_user: current_user,
- file_name: file_name,
- file_size: file_size,
- upload_type: upload_type,
- metadata: parse_allowed_metadata(params[:metadata])
- )
- end
+ external_upload_data =
+ create_direct_multipart_upload do
+ ExternalUploadManager.create_direct_multipart_upload(
+ current_user: current_user,
+ file_name: file_name,
+ file_size: file_size,
+ upload_type: upload_type,
+ metadata: parse_allowed_metadata(params[:metadata]),
+ )
+ end
rescue ExternalUploadHelpers::ExternalUploadValidationError => err
return render_json_error(err.message, status: 422)
end
@@ -121,21 +131,19 @@ module ExternalUploadHelpers
# The other external upload endpoints are not hit as often, so they can stay as constant
# values for now.
RateLimiter.new(
- current_user, "batch-presign", SiteSetting.max_batch_presign_multipart_per_minute, 1.minute
+ current_user,
+ "batch-presign",
+ SiteSetting.max_batch_presign_multipart_per_minute,
+ 1.minute,
).performed!
- part_numbers = part_numbers.map do |part_number|
- validate_part_number(part_number)
- end
+ part_numbers = part_numbers.map { |part_number| validate_part_number(part_number) }
- external_upload_stub = ExternalUploadStub.find_by(
- unique_identifier: unique_identifier, created_by: current_user
- )
+ external_upload_stub =
+ ExternalUploadStub.find_by(unique_identifier: unique_identifier, created_by: current_user)
return render_404 if external_upload_stub.blank?
- if !multipart_upload_exists?(external_upload_stub)
- return render_404
- end
+ return render_404 if !multipart_upload_exists?(external_upload_stub)
store = multipart_store(external_upload_stub.upload_type)
@@ -144,7 +152,7 @@ module ExternalUploadHelpers
presigned_urls[part_number] = store.presign_multipart_part(
upload_id: external_upload_stub.external_upload_identifier,
key: external_upload_stub.key,
- part_number: part_number
+ part_number: part_number,
)
end
@@ -157,10 +165,16 @@ module ExternalUploadHelpers
store.list_multipart_parts(
upload_id: external_upload_stub.external_upload_identifier,
key: external_upload_stub.key,
- max_parts: 1
+ max_parts: 1,
)
rescue Aws::S3::Errors::NoSuchUpload => err
- debug_upload_error(err, I18n.t("upload.external_upload_not_found", additional_detail: "path: #{external_upload_stub.key}"))
+ debug_upload_error(
+ err,
+ I18n.t(
+ "upload.external_upload_not_found",
+ additional_detail: "path: #{external_upload_stub.key}",
+ ),
+ )
return false
end
true
@@ -168,9 +182,8 @@ module ExternalUploadHelpers
def abort_multipart
external_upload_identifier = params.require(:external_upload_identifier)
- external_upload_stub = ExternalUploadStub.find_by(
- external_upload_identifier: external_upload_identifier
- )
+ external_upload_stub =
+ ExternalUploadStub.find_by(external_upload_identifier: external_upload_identifier)
# The stub could have already been deleted by an earlier error via
# ExternalUploadManager, so we consider this a great success if the
@@ -183,12 +196,20 @@ module ExternalUploadHelpers
begin
store.abort_multipart(
upload_id: external_upload_stub.external_upload_identifier,
- key: external_upload_stub.key
+ key: external_upload_stub.key,
)
rescue Aws::S3::Errors::ServiceError => err
- return render_json_error(
- debug_upload_error(err, I18n.t("upload.abort_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}")),
- status: 422
+ return(
+ render_json_error(
+ debug_upload_error(
+ err,
+ I18n.t(
+ "upload.abort_multipart_failure",
+ additional_detail: "external upload stub id: #{external_upload_stub.id}",
+ ),
+ ),
+ status: 422,
+ )
)
end
@@ -202,45 +223,57 @@ module ExternalUploadHelpers
parts = params.require(:parts)
RateLimiter.new(
- current_user, "complete-multipart-upload", ExternalUploadHelpers::COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute
+ current_user,
+ "complete-multipart-upload",
+ ExternalUploadHelpers::COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE,
+ 1.minute,
).performed!
- external_upload_stub = ExternalUploadStub.find_by(
- unique_identifier: unique_identifier, created_by: current_user
- )
+ external_upload_stub =
+ ExternalUploadStub.find_by(unique_identifier: unique_identifier, created_by: current_user)
return render_404 if external_upload_stub.blank?
- if !multipart_upload_exists?(external_upload_stub)
- return render_404
- end
+ return render_404 if !multipart_upload_exists?(external_upload_stub)
store = multipart_store(external_upload_stub.upload_type)
- parts = parts.map do |part|
- part_number = part[:part_number]
- etag = part[:etag]
- part_number = validate_part_number(part_number)
+ parts =
+ parts
+ .map do |part|
+ part_number = part[:part_number]
+ etag = part[:etag]
+ part_number = validate_part_number(part_number)
- if etag.blank?
- raise Discourse::InvalidParameters.new("All parts must have an etag and a valid part number")
- end
+ if etag.blank?
+ raise Discourse::InvalidParameters.new(
+ "All parts must have an etag and a valid part number",
+ )
+ end
- # this is done so it's an array of hashes rather than an array of
- # ActionController::Parameters
- { part_number: part_number, etag: etag }
- end.sort_by do |part|
- part[:part_number]
- end
+ # this is done so it's an array of hashes rather than an array of
+ # ActionController::Parameters
+ { part_number: part_number, etag: etag }
+ end
+ .sort_by { |part| part[:part_number] }
begin
- complete_response = store.complete_multipart(
- upload_id: external_upload_stub.external_upload_identifier,
- key: external_upload_stub.key,
- parts: parts
- )
+ complete_response =
+ store.complete_multipart(
+ upload_id: external_upload_stub.external_upload_identifier,
+ key: external_upload_stub.key,
+ parts: parts,
+ )
rescue Aws::S3::Errors::ServiceError => err
- return render_json_error(
- debug_upload_error(err, I18n.t("upload.complete_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}")),
- status: 422
+ return(
+ render_json_error(
+ debug_upload_error(
+ err,
+ I18n.t(
+ "upload.complete_multipart_failure",
+ additional_detail: "external upload stub id: #{external_upload_stub.id}",
+ ),
+ ),
+ status: 422,
+ )
)
end
@@ -270,27 +303,40 @@ module ExternalUploadHelpers
end
rescue ExternalUploadManager::SizeMismatchError => err
render_json_error(
- debug_upload_error(err, I18n.t("upload.size_mismatch_failure", additional_detail: err.message)),
- status: 422
+ debug_upload_error(
+ err,
+ I18n.t("upload.size_mismatch_failure", additional_detail: err.message),
+ ),
+ status: 422,
)
rescue ExternalUploadManager::ChecksumMismatchError => err
render_json_error(
- debug_upload_error(err, I18n.t("upload.checksum_mismatch_failure", additional_detail: err.message)),
- status: 422
+ debug_upload_error(
+ err,
+ I18n.t("upload.checksum_mismatch_failure", additional_detail: err.message),
+ ),
+ status: 422,
)
rescue ExternalUploadManager::CannotPromoteError => err
render_json_error(
- debug_upload_error(err, I18n.t("upload.cannot_promote_failure", additional_detail: err.message)),
- status: 422
+ debug_upload_error(
+ err,
+ I18n.t("upload.cannot_promote_failure", additional_detail: err.message),
+ ),
+ status: 422,
)
rescue ExternalUploadManager::DownloadFailedError, Aws::S3::Errors::NotFound => err
render_json_error(
- debug_upload_error(err, I18n.t("upload.download_failure", additional_detail: err.message)),
- status: 422
+ debug_upload_error(
+ err,
+ I18n.t("upload.download_failure", additional_detail: err.message),
+ ),
+ status: 422,
)
rescue => err
Discourse.warn_exception(
- err, message: "Complete external upload failed unexpectedly for user #{current_user.id}"
+ err,
+ message: "Complete external upload failed unexpectedly for user #{current_user.id}",
)
render_json_error(I18n.t("upload.failed"), status: 422)
@@ -308,10 +354,8 @@ module ExternalUploadHelpers
def validate_part_number(part_number)
part_number = part_number.to_i
- if !part_number.between?(1, 10000)
- raise Discourse::InvalidParameters.new(
- "Each part number should be between 1 and 10000"
- )
+ if !part_number.between?(1, 10_000)
+ raise Discourse::InvalidParameters.new("Each part number should be between 1 and 10000")
end
part_number
end
diff --git a/lib/faker/discourse.rb b/lib/faker/discourse.rb
index a2c4720026..ed527841dc 100644
--- a/lib/faker/discourse.rb
+++ b/lib/faker/discourse.rb
@@ -1,25 +1,24 @@
# frozen_string_literal: true
-require 'faker'
+require "faker"
module Faker
class Discourse < Base
class << self
-
def tag
- fetch('discourse.tags')
+ fetch("discourse.tags")
end
def category
- fetch('discourse.categories')
+ fetch("discourse.categories")
end
def group
- fetch('discourse.groups')
+ fetch("discourse.groups")
end
def topic
- fetch('discourse.topics')
+ fetch("discourse.topics")
end
end
end
diff --git a/lib/faker/discourse_markdown.rb b/lib/faker/discourse_markdown.rb
index 8f5266a191..f893f05189 100644
--- a/lib/faker/discourse_markdown.rb
+++ b/lib/faker/discourse_markdown.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-require 'faker'
-require 'net/http'
-require 'json'
+require "faker"
+require "net/http"
+require "json"
module Faker
class DiscourseMarkdown < Markdown
@@ -27,11 +27,8 @@ module Faker
image = next_image
image_file = load_image(image)
- upload = ::UploadCreator.new(
- image_file,
- image[:filename],
- origin: image[:url]
- ).create_for(user_id)
+ upload =
+ ::UploadCreator.new(image_file, image[:filename], origin: image[:url]).create_for(user_id)
::UploadMarkdown.new(upload).to_markdown if upload.present? && upload.persisted?
rescue => e
@@ -62,7 +59,7 @@ module Faker
end
image = @images.pop
- { filename: "#{image['id']}.jpg", url: "#{image['download_url']}.jpg" }
+ { filename: "#{image["id"]}.jpg", url: "#{image["download_url"]}.jpg" }
end
def image_cache_dir
@@ -74,12 +71,13 @@ module Faker
if !::File.exist?(cache_path)
FileUtils.mkdir_p(image_cache_dir)
- temp_file = ::FileHelper.download(
- image[:url],
- max_file_size: [SiteSetting.max_image_size_kb.kilobytes, 10.megabytes].max,
- tmp_file_name: "image",
- follow_redirect: true
- )
+ temp_file =
+ ::FileHelper.download(
+ image[:url],
+ max_file_size: [SiteSetting.max_image_size_kb.kilobytes, 10.megabytes].max,
+ tmp_file_name: "image",
+ follow_redirect: true,
+ )
FileUtils.cp(temp_file, cache_path)
end
diff --git a/lib/feed_element_installer.rb b/lib/feed_element_installer.rb
index 2e0ecd40ec..c96e89d144 100644
--- a/lib/feed_element_installer.rb
+++ b/lib/feed_element_installer.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
-require 'rexml/document'
-require 'rss'
+require "rexml/document"
+require "rss"
class FeedElementInstaller
private_class_method :new
@@ -10,7 +10,7 @@ class FeedElementInstaller
# RSS Specification at http://cyber.harvard.edu/rss/rss.html#extendingRss
# > A RSS feed may contain [non-standard elements], only if those elements are *defined in a namespace*
- new(element_name, feed).install if element_name.include?(':')
+ new(element_name, feed).install if element_name.include?(":")
end
attr_reader :feed, :original_name, :element_namespace, :element_name, :element_accessor
@@ -18,12 +18,13 @@ class FeedElementInstaller
def initialize(element_name, feed)
@feed = feed
@original_name = element_name
- @element_namespace, @element_name = *element_name.split(':')
+ @element_namespace, @element_name = *element_name.split(":")
@element_accessor = "#{@element_namespace}_#{@element_name}"
end
def element_uri
- @element_uri ||= REXML::Document.new(feed).root&.attributes&.namespaces&.fetch(@element_namespace, '') || ''
+ @element_uri ||=
+ REXML::Document.new(feed).root&.attributes&.namespaces&.fetch(@element_namespace, "") || ""
end
def install
@@ -34,13 +35,34 @@ class FeedElementInstaller
private
def install_in_rss
- RSS::Rss::Channel::Item.install_text_element(element_name, element_uri, '?', element_accessor, nil, original_name)
+ RSS::Rss::Channel::Item.install_text_element(
+ element_name,
+ element_uri,
+ "?",
+ element_accessor,
+ nil,
+ original_name,
+ )
RSS::BaseListener.install_get_text_element(element_uri, element_name, element_accessor)
end
def install_in_atom
- RSS::Atom::Entry.install_text_element(element_name, element_uri, '?', element_accessor, nil, original_name)
- RSS::Atom::Feed::Entry.install_text_element(element_name, element_uri, '?', element_accessor, nil, original_name)
+ RSS::Atom::Entry.install_text_element(
+ element_name,
+ element_uri,
+ "?",
+ element_accessor,
+ nil,
+ original_name,
+ )
+ RSS::Atom::Feed::Entry.install_text_element(
+ element_name,
+ element_uri,
+ "?",
+ element_accessor,
+ nil,
+ original_name,
+ )
RSS::BaseListener.install_get_text_element(element_uri, element_name, element_accessor)
end
@@ -49,6 +71,7 @@ class FeedElementInstaller
end
def installed_in_atom?
- RSS::Atom::Entry.method_defined?(element_accessor) || RSS::Atom::Feed::Entry.method_defined?(element_accessor)
+ RSS::Atom::Entry.method_defined?(element_accessor) ||
+ RSS::Atom::Feed::Entry.method_defined?(element_accessor)
end
end
diff --git a/lib/file_helper.rb b/lib/file_helper.rb
index 31530a12e9..9b57251f83 100644
--- a/lib/file_helper.rb
+++ b/lib/file_helper.rb
@@ -5,11 +5,10 @@ require "mini_mime"
require "open-uri"
class FileHelper
-
def self.log(log_level, message)
Rails.logger.public_send(
log_level,
- "#{RailsMultisite::ConnectionManagement.current_db}: #{message}"
+ "#{RailsMultisite::ConnectionManagement.current_db}: #{message}",
)
end
@@ -41,29 +40,31 @@ class FileHelper
attr_accessor :status
end
- def self.download(url,
- max_file_size:,
- tmp_file_name:,
- follow_redirect: false,
- read_timeout: 5,
- skip_rate_limit: false,
- verbose: false,
- validate_uri: true,
- retain_on_max_file_size_exceeded: false)
-
+ def self.download(
+ url,
+ max_file_size:,
+ tmp_file_name:,
+ follow_redirect: false,
+ read_timeout: 5,
+ skip_rate_limit: false,
+ verbose: false,
+ validate_uri: true,
+ retain_on_max_file_size_exceeded: false
+ )
url = "https:" + url if url.start_with?("//")
- raise Discourse::InvalidParameters.new(:url) unless url =~ /^https?:\/\//
+ raise Discourse::InvalidParameters.new(:url) unless url =~ %r{^https?://}
tmp = nil
- fd = FinalDestination.new(
- url,
- max_redirects: follow_redirect ? 5 : 0,
- skip_rate_limit: skip_rate_limit,
- verbose: verbose,
- validate_uri: validate_uri,
- timeout: read_timeout
- )
+ fd =
+ FinalDestination.new(
+ url,
+ max_redirects: follow_redirect ? 5 : 0,
+ skip_rate_limit: skip_rate_limit,
+ verbose: verbose,
+ validate_uri: validate_uri,
+ timeout: read_timeout,
+ )
fd.get do |response, chunk, uri|
if tmp.nil?
@@ -110,7 +111,7 @@ class FileHelper
def self.optimize_image!(filename, allow_pngquant: false)
image_optim(
allow_pngquant: allow_pngquant,
- strip_image_metadata: SiteSetting.strip_image_metadata
+ strip_image_metadata: SiteSetting.strip_image_metadata,
).optimize_image!(filename)
end
@@ -119,23 +120,26 @@ class FileHelper
# sometimes up to 200ms searching for binaries and looking at versions
memoize("image_optim", allow_pngquant, strip_image_metadata) do
pngquant_options = false
- if allow_pngquant
- pngquant_options = { allow_lossy: true }
- end
+ pngquant_options = { allow_lossy: true } if allow_pngquant
ImageOptim.new(
# GLOBAL
timeout: 15,
skip_missing_workers: true,
# PNG
- oxipng: { level: 3, strip: strip_image_metadata },
+ oxipng: {
+ level: 3,
+ strip: strip_image_metadata,
+ },
optipng: false,
advpng: false,
pngcrush: false,
pngout: false,
pngquant: pngquant_options,
# JPG
- jpegoptim: { strip: strip_image_metadata ? "all" : "none" },
+ jpegoptim: {
+ strip: strip_image_metadata ? "all" : "none",
+ },
jpegtran: false,
jpegrecompress: false,
# Skip looking for gifsicle, svgo binaries
@@ -150,24 +154,24 @@ class FileHelper
end
def self.supported_gravatar_extensions
- @@supported_gravatar_images ||= Set.new(%w{jpg jpeg png gif})
+ @@supported_gravatar_images ||= Set.new(%w[jpg jpeg png gif])
end
def self.supported_images
- @@supported_images ||= Set.new %w{jpg jpeg png gif svg ico webp}
+ @@supported_images ||= Set.new %w[jpg jpeg png gif svg ico webp]
end
def self.inline_images
# SVG cannot safely be shown as a document
- @@inline_images ||= supported_images - %w{svg}
+ @@inline_images ||= supported_images - %w[svg]
end
def self.supported_audio
- @@supported_audio ||= Set.new %w{mp3 ogg oga opus wav m4a m4b m4p m4r aac flac}
+ @@supported_audio ||= Set.new %w[mp3 ogg oga opus wav m4a m4b m4p m4r aac flac]
end
def self.supported_video
- @@supported_video ||= Set.new %w{mov mp4 webm ogv m4v 3gp avi mpeg}
+ @@supported_video ||= Set.new %w[mov mp4 webm ogv m4v 3gp avi mpeg]
end
def self.supported_video_regexp
diff --git a/lib/file_store/base_store.rb b/lib/file_store/base_store.rb
index 8b4c41cf6f..f73114ca89 100644
--- a/lib/file_store/base_store.rb
+++ b/lib/file_store/base_store.rb
@@ -1,10 +1,9 @@
# frozen_string_literal: true
module FileStore
-
class BaseStore
- UPLOAD_PATH_REGEX ||= %r|/(original/\d+X/.*)|
- OPTIMIZED_IMAGE_PATH_REGEX ||= %r|/(optimized/\d+X/.*)|
+ UPLOAD_PATH_REGEX ||= %r{/(original/\d+X/.*)}
+ OPTIMIZED_IMAGE_PATH_REGEX ||= %r{/(optimized/\d+X/.*)}
TEMPORARY_UPLOAD_PREFIX ||= "temp/"
def store_upload(file, upload, content_type = nil)
@@ -38,7 +37,7 @@ module FileStore
def upload_path
path = File.join("uploads", RailsMultisite::ConnectionManagement.current_db)
return path if !Rails.env.test?
- File.join(path, "test_#{ENV['TEST_ENV_NUMBER'].presence || '0'}")
+ File.join(path, "test_#{ENV["TEST_ENV_NUMBER"].presence || "0"}")
end
def self.temporary_upload_path(file_name, folder_prefix: "")
@@ -46,12 +45,7 @@ module FileStore
# characters, which can interfere with external providers operations and
# introduce other unexpected behaviour.
file_name_random = "#{SecureRandom.hex}#{File.extname(file_name)}"
- File.join(
- TEMPORARY_UPLOAD_PREFIX,
- folder_prefix,
- SecureRandom.hex,
- file_name_random
- )
+ File.join(TEMPORARY_UPLOAD_PREFIX, folder_prefix, SecureRandom.hex, file_name_random)
end
def has_been_uploaded?(url)
@@ -96,25 +90,37 @@ module FileStore
def download(object, max_file_size_kb: nil)
DistributedMutex.synchronize("download_#{object.sha1}", validity: 3.minutes) do
- extension = File.extname(object.respond_to?(:original_filename) ? object.original_filename : object.url)
+ extension =
+ File.extname(
+ object.respond_to?(:original_filename) ? object.original_filename : object.url,
+ )
filename = "#{object.sha1}#{extension}"
file = get_from_cache(filename)
if !file
- max_file_size_kb ||= [SiteSetting.max_image_size_kb, SiteSetting.max_attachment_size_kb].max.kilobytes
+ max_file_size_kb ||= [
+ SiteSetting.max_image_size_kb,
+ SiteSetting.max_attachment_size_kb,
+ ].max.kilobytes
secure = object.respond_to?(:secure) ? object.secure? : object.upload.secure?
- url = secure ?
- Discourse.store.signed_url_for_path(object.url) :
- Discourse.store.cdn_url(object.url)
+ url =
+ (
+ if secure
+ Discourse.store.signed_url_for_path(object.url)
+ else
+ Discourse.store.cdn_url(object.url)
+ end
+ )
- url = SiteSetting.scheme + ":" + url if url =~ /^\/\//
- file = FileHelper.download(
- url,
- max_file_size: max_file_size_kb,
- tmp_file_name: "discourse-download",
- follow_redirect: true
- )
+ url = SiteSetting.scheme + ":" + url if url =~ %r{^//}
+ file =
+ FileHelper.download(
+ url,
+ max_file_size: max_file_size_kb,
+ tmp_file_name: "discourse-download",
+ follow_redirect: true,
+ )
return nil if file.nil?
@@ -162,7 +168,8 @@ module FileStore
upload = optimized_image.upload
version = optimized_image.version || 1
- extension = "_#{version}_#{optimized_image.width}x#{optimized_image.height}#{optimized_image.extension}"
+ extension =
+ "_#{version}_#{optimized_image.width}x#{optimized_image.height}#{optimized_image.extension}"
get_path_for("optimized", upload.id, upload.sha1, extension)
end
@@ -214,5 +221,4 @@ module FileStore
path
end
end
-
end
diff --git a/lib/file_store/local_store.rb b/lib/file_store/local_store.rb
index 4922eca707..c0461aa50a 100644
--- a/lib/file_store/local_store.rb
+++ b/lib/file_store/local_store.rb
@@ -1,11 +1,9 @@
# frozen_string_literal: true
-require 'file_store/base_store'
+require "file_store/base_store"
module FileStore
-
class LocalStore < BaseStore
-
def store_file(file, path)
copy_file(file, "#{public_dir}#{path}")
"#{Discourse.base_path}#{path}"
@@ -64,7 +62,13 @@ module FileStore
def purge_tombstone(grace_period)
if Dir.exist?(Discourse.store.tombstone_dir)
Discourse::Utils.execute_command(
- 'find', tombstone_dir, '-mtime', "+#{grace_period}", '-type', 'f', '-delete'
+ "find",
+ tombstone_dir,
+ "-mtime",
+ "+#{grace_period}",
+ "-type",
+ "f",
+ "-delete",
)
end
end
@@ -108,9 +112,13 @@ module FileStore
FileUtils.mkdir_p(File.join(public_dir, upload_path))
Discourse::Utils.execute_command(
- 'rsync', '-a', '--safe-links', "#{source_path}/", "#{upload_path}/",
+ "rsync",
+ "-a",
+ "--safe-links",
+ "#{source_path}/",
+ "#{upload_path}/",
failure_message: "Failed to copy uploads.",
- chdir: public_dir
+ chdir: public_dir,
)
end
@@ -119,15 +127,14 @@ module FileStore
def list_missing(model)
count = 0
model.find_each do |upload|
-
# could be a remote image
- next unless upload.url =~ /^\/[^\/]/
+ next unless upload.url =~ %r{^/[^/]}
path = "#{public_dir}#{upload.url}"
bad = true
begin
bad = false if File.size(path) != 0
- rescue
+ rescue StandardError
# something is messed up
end
if bad
diff --git a/lib/file_store/s3_store.rb b/lib/file_store/s3_store.rb
index 074bf92446..8fdbd2d9fa 100644
--- a/lib/file_store/s3_store.rb
+++ b/lib/file_store/s3_store.rb
@@ -7,61 +7,64 @@ require "s3_helper"
require "file_helper"
module FileStore
-
class S3Store < BaseStore
TOMBSTONE_PREFIX ||= "tombstone/"
- delegate :abort_multipart, :presign_multipart_part, :list_multipart_parts,
- :complete_multipart, to: :s3_helper
+ delegate :abort_multipart,
+ :presign_multipart_part,
+ :list_multipart_parts,
+ :complete_multipart,
+ to: :s3_helper
def initialize(s3_helper = nil)
@s3_helper = s3_helper
end
def s3_helper
- @s3_helper ||= S3Helper.new(s3_bucket,
- Rails.configuration.multisite ? multisite_tombstone_prefix : TOMBSTONE_PREFIX
- )
+ @s3_helper ||=
+ S3Helper.new(
+ s3_bucket,
+ Rails.configuration.multisite ? multisite_tombstone_prefix : TOMBSTONE_PREFIX,
+ )
end
def store_upload(file, upload, content_type = nil)
upload.url = nil
path = get_path_for_upload(upload)
- url, upload.etag = store_file(
- file,
- path,
- filename: upload.original_filename,
- content_type: content_type,
- cache_locally: true,
- private_acl: upload.secure?
- )
+ url, upload.etag =
+ store_file(
+ file,
+ path,
+ filename: upload.original_filename,
+ content_type: content_type,
+ cache_locally: true,
+ private_acl: upload.secure?,
+ )
url
end
- def move_existing_stored_upload(
- existing_external_upload_key:,
- upload: nil,
- content_type: nil
- )
+ def move_existing_stored_upload(existing_external_upload_key:, upload: nil, content_type: nil)
upload.url = nil
path = get_path_for_upload(upload)
- url, upload.etag = store_file(
- nil,
- path,
- filename: upload.original_filename,
- content_type: content_type,
- cache_locally: false,
- private_acl: upload.secure?,
- move_existing: true,
- existing_external_upload_key: existing_external_upload_key
- )
+ url, upload.etag =
+ store_file(
+ nil,
+ path,
+ filename: upload.original_filename,
+ content_type: content_type,
+ cache_locally: false,
+ private_acl: upload.secure?,
+ move_existing: true,
+ existing_external_upload_key: existing_external_upload_key,
+ )
url
end
def store_optimized_image(file, optimized_image, content_type = nil, secure: false)
optimized_image.url = nil
path = get_path_for_optimized_image(optimized_image)
- url, optimized_image.etag = store_file(file, path, content_type: content_type, private_acl: secure)
+ url, optimized_image.etag =
+ store_file(file, path, content_type: content_type, private_acl: secure)
url
end
@@ -85,8 +88,9 @@ module FileStore
cache_file(file, File.basename(path)) if opts[:cache_locally]
options = {
acl: opts[:private_acl] ? "private" : "public-read",
- cache_control: 'max-age=31556952, public, immutable',
- content_type: opts[:content_type].presence || MiniMime.lookup_by_filename(filename)&.content_type
+ cache_control: "max-age=31556952, public, immutable",
+ content_type:
+ opts[:content_type].presence || MiniMime.lookup_by_filename(filename)&.content_type,
}
# add a "content disposition: attachment" header with the original
@@ -96,7 +100,8 @@ module FileStore
# browser.
if !FileHelper.is_inline_image?(filename)
options[:content_disposition] = ActionDispatch::Http::ContentDisposition.format(
- disposition: "attachment", filename: filename
+ disposition: "attachment",
+ filename: filename,
)
end
@@ -106,11 +111,7 @@ module FileStore
if opts[:move_existing] && opts[:existing_external_upload_key]
original_path = opts[:existing_external_upload_key]
options[:apply_metadata_to_destination] = true
- path, etag = s3_helper.copy(
- original_path,
- path,
- options: options
- )
+ path, etag = s3_helper.copy(original_path, path, options: options)
delete_file(original_path)
else
path, etag = s3_helper.upload(file, path, options)
@@ -142,7 +143,7 @@ module FileStore
begin
parsed_url = URI.parse(UrlHelper.encode(url))
- rescue
+ rescue StandardError
# There are many exceptions possible here including Addressable::URI:: exceptions
# and URI:: exceptions, catch all may seem wide, but it makes no sense to raise ever
# on an invalid url here
@@ -169,7 +170,10 @@ module FileStore
s3_cdn_url = URI.parse(SiteSetting.Upload.s3_cdn_url || "")
cdn_hostname = s3_cdn_url.hostname
- return true if cdn_hostname.presence && url[cdn_hostname] && (s3_cdn_url.path.blank? || parsed_url.path.starts_with?(s3_cdn_url.path))
+ if cdn_hostname.presence && url[cdn_hostname] &&
+ (s3_cdn_url.path.blank? || parsed_url.path.starts_with?(s3_cdn_url.path))
+ return true
+ end
false
end
@@ -186,7 +190,11 @@ module FileStore
end
def s3_upload_host
- SiteSetting.Upload.s3_cdn_url.present? ? SiteSetting.Upload.s3_cdn_url : "https:#{absolute_base_url}"
+ if SiteSetting.Upload.s3_cdn_url.present?
+ SiteSetting.Upload.s3_cdn_url
+ else
+ "https:#{absolute_base_url}"
+ end
end
def external?
@@ -208,28 +216,45 @@ module FileStore
def path_for(upload)
url = upload&.url
- FileStore::LocalStore.new.path_for(upload) if url && url[/^\/[^\/]/]
+ FileStore::LocalStore.new.path_for(upload) if url && url[%r{^/[^/]}]
end
def url_for(upload, force_download: false)
- upload.secure? || force_download ?
- presigned_get_url(get_upload_key(upload), force_download: force_download, filename: upload.original_filename) :
+ if upload.secure? || force_download
+ presigned_get_url(
+ get_upload_key(upload),
+ force_download: force_download,
+ filename: upload.original_filename,
+ )
+ else
upload.url
+ end
end
def cdn_url(url)
return url if SiteSetting.Upload.s3_cdn_url.blank?
- schema = url[/^(https?:)?\/\//, 1]
+ schema = url[%r{^(https?:)?//}, 1]
folder = s3_bucket_folder_path.nil? ? "" : "#{s3_bucket_folder_path}/"
- url.sub(File.join("#{schema}#{absolute_base_url}", folder), File.join(SiteSetting.Upload.s3_cdn_url, "/"))
+ url.sub(
+ File.join("#{schema}#{absolute_base_url}", folder),
+ File.join(SiteSetting.Upload.s3_cdn_url, "/"),
+ )
end
- def signed_url_for_path(path, expires_in: SiteSetting.s3_presigned_get_url_expires_after_seconds, force_download: false)
+ def signed_url_for_path(
+ path,
+ expires_in: SiteSetting.s3_presigned_get_url_expires_after_seconds,
+ force_download: false
+ )
key = path.sub(absolute_base_url + "/", "")
presigned_get_url(key, expires_in: expires_in, force_download: force_download)
end
- def signed_url_for_temporary_upload(file_name, expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS, metadata: {})
+ def signed_url_for_temporary_upload(
+ file_name,
+ expires_in: S3Helper::UPLOAD_URL_EXPIRES_AFTER_SECONDS,
+ metadata: {}
+ )
key = temporary_upload_path(file_name)
s3_helper.presigned_url(
key,
@@ -237,16 +262,15 @@ module FileStore
expires_in: expires_in,
opts: {
metadata: metadata,
- acl: "private"
- }
+ acl: "private",
+ },
)
end
def temporary_upload_path(file_name)
- folder_prefix = s3_bucket_folder_path.nil? ? upload_path : File.join(s3_bucket_folder_path, upload_path)
- FileStore::BaseStore.temporary_upload_path(
- file_name, folder_prefix: folder_prefix
- )
+ folder_prefix =
+ s3_bucket_folder_path.nil? ? upload_path : File.join(s3_bucket_folder_path, upload_path)
+ FileStore::BaseStore.temporary_upload_path(file_name, folder_prefix: folder_prefix)
end
def object_from_path(path)
@@ -264,13 +288,15 @@ module FileStore
end
def s3_bucket
- raise Discourse::SiteSettingMissing.new("s3_upload_bucket") if SiteSetting.Upload.s3_upload_bucket.blank?
+ if SiteSetting.Upload.s3_upload_bucket.blank?
+ raise Discourse::SiteSettingMissing.new("s3_upload_bucket")
+ end
SiteSetting.Upload.s3_upload_bucket.downcase
end
def list_missing_uploads(skip_optimized: false)
if SiteSetting.enable_s3_inventory
- require 's3_inventory'
+ require "s3_inventory"
S3Inventory.new(s3_helper, :upload).backfill_etags_and_list_missing
S3Inventory.new(s3_helper, :optimized).backfill_etags_and_list_missing unless skip_optimized
else
@@ -326,7 +352,6 @@ module FileStore
s3_options: FileStore::ToS3Migration.s3_options_from_site_settings,
migrate_to_multisite: Rails.configuration.multisite,
).migrate
-
ensure
FileUtils.rm(public_upload_path) if File.symlink?(public_upload_path)
FileUtils.mv(old_upload_path, public_upload_path) if old_upload_path
@@ -349,7 +374,8 @@ module FileStore
if force_download && filename
opts[:response_content_disposition] = ActionDispatch::Http::ContentDisposition.format(
- disposition: "attachment", filename: filename
+ disposition: "attachment",
+ filename: filename,
)
end
@@ -375,11 +401,11 @@ module FileStore
def list_missing(model, prefix)
connection = ActiveRecord::Base.connection.raw_connection
- connection.exec('CREATE TEMP TABLE verified_ids(val integer PRIMARY KEY)')
+ connection.exec("CREATE TEMP TABLE verified_ids(val integer PRIMARY KEY)")
marker = nil
files = s3_helper.list(prefix, marker)
- while files.count > 0 do
+ while files.count > 0
verified_ids = []
files.each do |f|
@@ -388,23 +414,25 @@ module FileStore
marker = f.key
end
- verified_id_clause = verified_ids.map { |id| "('#{PG::Connection.escape_string(id.to_s)}')" }.join(",")
+ verified_id_clause =
+ verified_ids.map { |id| "('#{PG::Connection.escape_string(id.to_s)}')" }.join(",")
connection.exec("INSERT INTO verified_ids VALUES #{verified_id_clause}")
files = s3_helper.list(prefix, marker)
end
- missing_uploads = model.joins('LEFT JOIN verified_ids ON verified_ids.val = id').where("verified_ids.val IS NULL")
+ missing_uploads =
+ model.joins("LEFT JOIN verified_ids ON verified_ids.val = id").where(
+ "verified_ids.val IS NULL",
+ )
missing_count = missing_uploads.count
if missing_count > 0
- missing_uploads.find_each do |upload|
- puts upload.url
- end
+ missing_uploads.find_each { |upload| puts upload.url }
puts "#{missing_count} of #{model.count} #{model.name.underscore.pluralize} are missing"
end
ensure
- connection.exec('DROP TABLE verified_ids') unless connection.nil?
+ connection.exec("DROP TABLE verified_ids") unless connection.nil?
end
end
end
diff --git a/lib/file_store/to_s3_migration.rb b/lib/file_store/to_s3_migration.rb
index 0d6c098327..b99c911a6b 100644
--- a/lib/file_store/to_s3_migration.rb
+++ b/lib/file_store/to_s3_migration.rb
@@ -1,16 +1,20 @@
# frozen_string_literal: true
-require 'aws-sdk-s3'
+require "aws-sdk-s3"
module FileStore
ToS3MigrationError = Class.new(RuntimeError)
class ToS3Migration
- MISSING_UPLOADS_RAKE_TASK_NAME ||= 'posts:missing_uploads'
+ MISSING_UPLOADS_RAKE_TASK_NAME ||= "posts:missing_uploads"
UPLOAD_CONCURRENCY ||= 20
- def initialize(s3_options:, dry_run: false, migrate_to_multisite: false, skip_etag_verify: false)
-
+ def initialize(
+ s3_options:,
+ dry_run: false,
+ migrate_to_multisite: false,
+ skip_etag_verify: false
+ )
@s3_bucket = s3_options[:bucket]
@s3_client_options = s3_options[:client_options]
@dry_run = dry_run
@@ -22,20 +26,18 @@ module FileStore
def self.s3_options_from_site_settings
{
client_options: S3Helper.s3_options(SiteSetting),
- bucket: SiteSetting.Upload.s3_upload_bucket
+ bucket: SiteSetting.Upload.s3_upload_bucket,
}
end
def self.s3_options_from_env
- unless ENV["DISCOURSE_S3_BUCKET"].present? &&
- ENV["DISCOURSE_S3_REGION"].present? &&
- (
- (
- ENV["DISCOURSE_S3_ACCESS_KEY_ID"].present? &&
- ENV["DISCOURSE_S3_SECRET_ACCESS_KEY"].present?
- ) || ENV["DISCOURSE_S3_USE_IAM_PROFILE"].present?
- )
-
+ unless ENV["DISCOURSE_S3_BUCKET"].present? && ENV["DISCOURSE_S3_REGION"].present? &&
+ (
+ (
+ ENV["DISCOURSE_S3_ACCESS_KEY_ID"].present? &&
+ ENV["DISCOURSE_S3_SECRET_ACCESS_KEY"].present?
+ ) || ENV["DISCOURSE_S3_USE_IAM_PROFILE"].present?
+ )
raise ToS3MigrationError.new(<<~TEXT)
Please provide the following environment variables:
- DISCOURSE_S3_BUCKET
@@ -53,13 +55,10 @@ module FileStore
if ENV["DISCOURSE_S3_USE_IAM_PROFILE"].blank?
opts[:access_key_id] = ENV["DISCOURSE_S3_ACCESS_KEY_ID"]
- opts[:secret_access_key] = ENV["DISCOURSE_S3_SECRET_ACCESS_KEY"]
+ opts[:secret_access_key] = ENV["DISCOURSE_S3_SECRET_ACCESS_KEY"]
end
- {
- client_options: opts,
- bucket: ENV["DISCOURSE_S3_BUCKET"]
- }
+ { client_options: opts, bucket: ENV["DISCOURSE_S3_BUCKET"] }
end
def migrate
@@ -75,7 +74,8 @@ module FileStore
base_url = File.join(SiteSetting.Upload.s3_base_url, prefix)
count = Upload.by_users.where("url NOT LIKE '#{base_url}%'").count
if count > 0
- error_message = "#{count} of #{Upload.count} uploads are not migrated to S3. #{failure_message}"
+ error_message =
+ "#{count} of #{Upload.count} uploads are not migrated to S3. #{failure_message}"
raise_or_log(error_message, should_raise)
success = false
end
@@ -88,7 +88,9 @@ module FileStore
success = false
end
- Discourse::Application.load_tasks unless Rake::Task.task_defined?(MISSING_UPLOADS_RAKE_TASK_NAME)
+ unless Rake::Task.task_defined?(MISSING_UPLOADS_RAKE_TASK_NAME)
+ Discourse::Application.load_tasks
+ end
Rake::Task[MISSING_UPLOADS_RAKE_TASK_NAME]
count = DB.query_single(<<~SQL, Post::MISSING_UPLOADS, Post::MISSING_UPLOADS_IGNORED).first
SELECT COUNT(1)
@@ -109,10 +111,14 @@ module FileStore
success = false
end
- count = Post.where('baked_version <> ? OR baked_version IS NULL', Post::BAKED_VERSION).count
+ count = Post.where("baked_version <> ? OR baked_version IS NULL", Post::BAKED_VERSION).count
if count > 0
log("#{count} posts still require rebaking and will be rebaked during regular job")
- log("To speed up migrations of posts we recommend you run 'rake posts:rebake_uncooked_posts'") if count > 100
+ if count > 100
+ log(
+ "To speed up migrations of posts we recommend you run 'rake posts:rebake_uncooked_posts'",
+ )
+ end
success = false
else
log("No posts require rebaking")
@@ -153,8 +159,10 @@ module FileStore
Upload.migrate_to_new_scheme
if !uploads_migrated_to_new_scheme?
- raise ToS3MigrationError.new("Some uploads could not be migrated to the new scheme. " \
- "You need to fix this manually.")
+ raise ToS3MigrationError.new(
+ "Some uploads could not be migrated to the new scheme. " \
+ "You need to fix this manually.",
+ )
end
end
@@ -174,10 +182,12 @@ module FileStore
log " - Listing local files"
local_files = []
- IO.popen("cd #{public_directory} && find uploads/#{@current_db}/original -type f").each do |file|
- local_files << file.chomp
- putc "." if local_files.size % 1000 == 0
- end
+ IO
+ .popen("cd #{public_directory} && find uploads/#{@current_db}/original -type f")
+ .each do |file|
+ local_files << file.chomp
+ putc "." if local_files.size % 1000 == 0
+ end
log " => #{local_files.size} files"
log " - Listing S3 files"
@@ -203,19 +213,20 @@ module FileStore
failed = []
lock = Mutex.new
- upload_threads = UPLOAD_CONCURRENCY.times.map do
- Thread.new do
- while obj = queue.pop
- if s3.put_object(obj[:options]).etag[obj[:etag]]
- putc "."
- lock.synchronize { synced += 1 }
- else
- putc "X"
- lock.synchronize { failed << obj[:path] }
+ upload_threads =
+ UPLOAD_CONCURRENCY.times.map do
+ Thread.new do
+ while obj = queue.pop
+ if s3.put_object(obj[:options]).etag[obj[:etag]]
+ putc "."
+ lock.synchronize { synced += 1 }
+ else
+ putc "X"
+ lock.synchronize { failed << obj[:path] }
+ end
end
end
end
- end
local_files.each do |file|
path = File.join(public_directory, file)
@@ -242,17 +253,17 @@ module FileStore
if upload&.original_filename
options[:content_disposition] = ActionDispatch::Http::ContentDisposition.format(
- disposition: "attachment", filename: upload.original_filename
+ disposition: "attachment",
+ filename: upload.original_filename,
)
end
- if upload&.secure
- options[:acl] = "private"
- end
+ options[:acl] = "private" if upload&.secure
elsif !FileHelper.is_inline_image?(name)
upload = Upload.find_by(url: "/#{file}")
options[:content_disposition] = ActionDispatch::Http::ContentDisposition.format(
- disposition: "attachment", filename: upload&.original_filename || name
+ disposition: "attachment",
+ filename: upload&.original_filename || name,
)
end
@@ -292,26 +303,25 @@ module FileStore
[
[
"src=\"/uploads/#{@current_db}/original/(\\dX/(?:[a-f0-9]/)*[a-f0-9]{40}[a-z0-9\\.]*)",
- "src=\"#{SiteSetting.Upload.s3_base_url}/#{prefix}\\1"
+ "src=\"#{SiteSetting.Upload.s3_base_url}/#{prefix}\\1",
],
[
"src='/uploads/#{@current_db}/original/(\\dX/(?:[a-f0-9]/)*[a-f0-9]{40}[a-z0-9\\.]*)",
- "src='#{SiteSetting.Upload.s3_base_url}/#{prefix}\\1"
+ "src='#{SiteSetting.Upload.s3_base_url}/#{prefix}\\1",
],
[
"href=\"/uploads/#{@current_db}/original/(\\dX/(?:[a-f0-9]/)*[a-f0-9]{40}[a-z0-9\\.]*)",
- "href=\"#{SiteSetting.Upload.s3_base_url}/#{prefix}\\1"
+ "href=\"#{SiteSetting.Upload.s3_base_url}/#{prefix}\\1",
],
[
"href='/uploads/#{@current_db}/original/(\\dX/(?:[a-f0-9]/)*[a-f0-9]{40}[a-z0-9\\.]*)",
- "href='#{SiteSetting.Upload.s3_base_url}/#{prefix}\\1"
+ "href='#{SiteSetting.Upload.s3_base_url}/#{prefix}\\1",
],
[
"\\[img\\]/uploads/#{@current_db}/original/(\\dX/(?:[a-f0-9]/)*[a-f0-9]{40}[a-z0-9\\.]*)\\[/img\\]",
- "[img]#{SiteSetting.Upload.s3_base_url}/#{prefix}\\1[/img]"
- ]
+ "[img]#{SiteSetting.Upload.s3_base_url}/#{prefix}\\1[/img]",
+ ],
].each do |from_url, to_url|
-
if @dry_run
log "REPLACING '#{from_url}' WITH '#{to_url}'"
else
@@ -321,16 +331,22 @@ module FileStore
unless @dry_run
# Legacy inline image format
- Post.where("raw LIKE '%%'").each do |post|
- regexp = /!\[\](\/uploads\/#{@current_db}\/original\/(\dX\/(?:[a-f0-9]\/)*[a-f0-9]{40}[a-z0-9\.]*))/
+ Post
+ .where("raw LIKE '%%'")
+ .each do |post|
+ regexp =
+ /!\[\](\/uploads\/#{@current_db}\/original\/(\dX\/(?:[a-f0-9]\/)*[a-f0-9]{40}[a-z0-9\.]*))/
- post.raw.scan(regexp).each do |upload_url, _|
- upload = Upload.get_from_url(upload_url)
- post.raw = post.raw.gsub("", "")
+ post
+ .raw
+ .scan(regexp)
+ .each do |upload_url, _|
+ upload = Upload.get_from_url(upload_url)
+ post.raw = post.raw.gsub("", "")
+ end
+
+ post.save!(validate: false)
end
-
- post.save!(validate: false)
- end
end
if Discourse.asset_host.present?
@@ -373,7 +389,6 @@ module FileStore
migration_successful?(should_raise: true)
log "Done!"
-
ensure
Jobs.run_later!
end
diff --git a/lib/filter_best_posts.rb b/lib/filter_best_posts.rb
index a243156b23..2ee63d35b5 100644
--- a/lib/filter_best_posts.rb
+++ b/lib/filter_best_posts.rb
@@ -1,16 +1,13 @@
# frozen_string_literal: true
class FilterBestPosts
-
attr_accessor :filtered_posts, :posts
def initialize(topic, filtered_posts, limit, options = {})
@filtered_posts = filtered_posts
@topic = topic
@limit = limit
- options.each do |key, value|
- self.instance_variable_set("@#{key}".to_sym, value)
- end
+ options.each { |key, value| self.instance_variable_set("@#{key}".to_sym, value) }
filter
end
@@ -31,37 +28,41 @@ class FilterBestPosts
def filter_posts_liked_by_moderators
return unless @only_moderator_liked
- liked_by_moderators = PostAction.where(post_id: @filtered_posts.pluck(:id), post_action_type_id: PostActionType.types[:like])
- liked_by_moderators = liked_by_moderators.joins(:user).where('users.moderator').pluck(:post_id)
+ liked_by_moderators =
+ PostAction.where(
+ post_id: @filtered_posts.pluck(:id),
+ post_action_type_id: PostActionType.types[:like],
+ )
+ liked_by_moderators = liked_by_moderators.joins(:user).where("users.moderator").pluck(:post_id)
@filtered_posts = @filtered_posts.where(id: liked_by_moderators)
end
def setup_posts
- @posts = @filtered_posts.order('percent_rank asc, sort_order asc').where("post_number > 1")
+ @posts = @filtered_posts.order("percent_rank asc, sort_order asc").where("post_number > 1")
@posts = @posts.includes(:reply_to_user).includes(:topic).joins(:user).limit(@limit)
end
def filter_posts_based_on_trust_level
- return unless @min_trust_level.try('>', 0)
+ return unless @min_trust_level.try(">", 0)
@posts =
- if @bypass_trust_level_score.try('>', 0)
- @posts.where('COALESCE(users.trust_level,0) >= ? OR posts.score >= ?',
+ if @bypass_trust_level_score.try(">", 0)
+ @posts.where(
+ "COALESCE(users.trust_level,0) >= ? OR posts.score >= ?",
@min_trust_level,
- @bypass_trust_level_score
+ @bypass_trust_level_score,
)
else
- @posts.where('COALESCE(users.trust_level,0) >= ?', @min_trust_level)
+ @posts.where("COALESCE(users.trust_level,0) >= ?", @min_trust_level)
end
end
def filter_posts_based_on_score
- return unless @min_score.try('>', 0)
- @posts = @posts.where('posts.score >= ?', @min_score)
+ return unless @min_score.try(">", 0)
+ @posts = @posts.where("posts.score >= ?", @min_score)
end
def sort_posts
@posts = Post.from(@posts, :posts).order(post_number: :asc)
end
-
end
diff --git a/lib/final_destination.rb b/lib/final_destination.rb
index fa5cbcc06d..893b38e9c5 100644
--- a/lib/final_destination.rb
+++ b/lib/final_destination.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
-require 'socket'
-require 'ipaddr'
-require 'excon'
-require 'rate_limiter'
-require 'url_helper'
+require "socket"
+require "ipaddr"
+require "excon"
+require "rate_limiter"
+require "url_helper"
# Determine the final endpoint for a Web URI, following redirects
class FinalDestination
@@ -30,7 +30,8 @@ class FinalDestination
"HTTPS_DOMAIN_#{domain}"
end
- DEFAULT_USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Safari/605.1.15"
+ DEFAULT_USER_AGENT =
+ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.0 Safari/605.1.15"
attr_reader :status, :cookie, :status_code, :content_type, :ignored
@@ -53,15 +54,11 @@ class FinalDestination
if @limit > 0
ignore_redirects = [Discourse.base_url_no_prefix]
- if @opts[:ignore_redirects]
- ignore_redirects.concat(@opts[:ignore_redirects])
- end
+ ignore_redirects.concat(@opts[:ignore_redirects]) if @opts[:ignore_redirects]
ignore_redirects.each do |ignore_redirect|
ignore_redirect = uri(ignore_redirect)
- if ignore_redirect.present? && ignore_redirect.hostname
- @ignored << ignore_redirect.hostname
- end
+ @ignored << ignore_redirect.hostname if ignore_redirect.present? && ignore_redirect.hostname
end
end
@@ -74,7 +71,14 @@ class FinalDestination
@timeout = @opts[:timeout] || nil
@preserve_fragment_url = @preserve_fragment_url_hosts.any? { |host| hostname_matches?(host) }
@validate_uri = @opts.fetch(:validate_uri) { true }
- @user_agent = @force_custom_user_agent_hosts.any? { |host| hostname_matches?(host) } ? Onebox.options.user_agent : @default_user_agent
+ @user_agent =
+ (
+ if @force_custom_user_agent_hosts.any? { |host| hostname_matches?(host) }
+ Onebox.options.user_agent
+ else
+ @default_user_agent
+ end
+ )
@stop_at_blocked_pages = @opts[:stop_at_blocked_pages]
end
@@ -107,10 +111,10 @@ class FinalDestination
"User-Agent" => @user_agent,
"Accept" => "*/*",
"Accept-Language" => "*",
- "Host" => @uri.hostname
+ "Host" => @uri.hostname,
}
- result['Cookie'] = @cookie if @cookie
+ result["Cookie"] = @cookie if @cookie
result
end
@@ -119,7 +123,12 @@ class FinalDestination
status_code, response_headers = nil
catch(:done) do
- FinalDestination::HTTP.start(@uri.host, @uri.port, use_ssl: @uri.is_a?(URI::HTTPS), open_timeout: timeout) do |http|
+ FinalDestination::HTTP.start(
+ @uri.host,
+ @uri.port,
+ use_ssl: @uri.is_a?(URI::HTTPS),
+ open_timeout: timeout,
+ ) do |http|
http.read_timeout = timeout
http.request_get(@uri.request_uri, request_headers) do |resp|
status_code = resp.code.to_i
@@ -162,7 +171,8 @@ class FinalDestination
location = "#{@uri.scheme}://#{@uri.host}#{location}" if location[0] == "/"
@uri = uri(location)
- if @uri && redirects == @max_redirects && @https_redirect_ignore_limit && same_uri_but_https?(old_uri, @uri)
+ if @uri && redirects == @max_redirects && @https_redirect_ignore_limit &&
+ same_uri_but_https?(old_uri, @uri)
redirects += 1
@https_redirect_ignore_limit = false
end
@@ -177,7 +187,7 @@ class FinalDestination
return if !@uri
extra = nil
- extra = { 'Cookie' => cookie } if cookie
+ extra = { "Cookie" => cookie } if cookie
get(redirects - 1, extra_headers: extra, &blk)
elsif result == :ok
@@ -223,11 +233,16 @@ class FinalDestination
request_start_time = Time.now
response_body = +""
- request_validator = lambda do |chunk, _remaining_bytes, _total_bytes|
- response_body << chunk
- raise Excon::Errors::ExpectationFailed.new("response size too big: #{@uri.to_s}") if response_body.bytesize > MAX_REQUEST_SIZE_BYTES
- raise Excon::Errors::ExpectationFailed.new("connect timeout reached: #{@uri.to_s}") if Time.now - request_start_time > MAX_REQUEST_TIME_SECONDS
- end
+ request_validator =
+ lambda do |chunk, _remaining_bytes, _total_bytes|
+ response_body << chunk
+ if response_body.bytesize > MAX_REQUEST_SIZE_BYTES
+ raise Excon::Errors::ExpectationFailed.new("response size too big: #{@uri.to_s}")
+ end
+ if Time.now - request_start_time > MAX_REQUEST_TIME_SECONDS
+ raise Excon::Errors::ExpectationFailed.new("connect timeout reached: #{@uri.to_s}")
+ end
+ end
# This technique will only use the first resolved IP
# TODO: Can we standardise this by using FinalDestination::HTTP?
@@ -240,18 +255,20 @@ class FinalDestination
request_uri = @uri.dup
request_uri.hostname = resolved_ip unless Rails.env.test? # WebMock doesn't understand the IP-based requests
- response = Excon.public_send(@http_verb,
- request_uri.to_s,
- read_timeout: timeout,
- connect_timeout: timeout,
- headers: { "Host" => @uri.hostname }.merge(headers),
- middlewares: middlewares,
- response_block: request_validator,
- ssl_verify_peer_host: @uri.hostname
- )
+ response =
+ Excon.public_send(
+ @http_verb,
+ request_uri.to_s,
+ read_timeout: timeout,
+ connect_timeout: timeout,
+ headers: { "Host" => @uri.hostname }.merge(headers),
+ middlewares: middlewares,
+ response_block: request_validator,
+ ssl_verify_peer_host: @uri.hostname,
+ )
if @stop_at_blocked_pages
- if blocked_domain?(@uri) || response.headers['Discourse-No-Onebox'] == "1"
+ if blocked_domain?(@uri) || response.headers["Discourse-No-Onebox"] == "1"
@status = :blocked_page
return
end
@@ -282,7 +299,7 @@ class FinalDestination
end
end
- @content_type = response.headers['Content-Type'] if response.headers.has_key?('Content-Type')
+ @content_type = response.headers["Content-Type"] if response.headers.has_key?("Content-Type")
@status = :resolved
return @uri
when 103, 400, 405, 406, 409, 500, 501
@@ -306,11 +323,11 @@ class FinalDestination
end
response_headers = {}
- if cookie_val = small_headers['set-cookie']
+ if cookie_val = small_headers["set-cookie"]
response_headers[:cookies] = cookie_val
end
- if location_val = small_headers['location']
+ if location_val = small_headers["location"]
response_headers[:location] = location_val.join
end
end
@@ -318,21 +335,20 @@ class FinalDestination
unless response_headers
response_headers = {
cookies: response.data[:cookies] || response.headers[:"set-cookie"],
- location: response.headers[:location]
+ location: response.headers[:location],
}
end
- if (300..399).include?(response_status)
- location = response_headers[:location]
- end
+ location = response_headers[:location] if (300..399).include?(response_status)
if cookies = response_headers[:cookies]
- @cookie = Array.wrap(cookies).map { |c| c.split(';').first.strip }.join('; ')
+ @cookie = Array.wrap(cookies).map { |c| c.split(";").first.strip }.join("; ")
end
if location
redirect_uri = uri(location)
- if @uri.host == redirect_uri.host && (redirect_uri.path =~ /\/login/ || redirect_uri.path =~ /\/session/)
+ if @uri.host == redirect_uri.host &&
+ (redirect_uri.path =~ %r{/login} || redirect_uri.path =~ %r{/session})
@status = :resolved
return @uri
end
@@ -342,7 +358,8 @@ class FinalDestination
location = "#{@uri.scheme}://#{@uri.host}#{location}" if location[0] == "/"
@uri = uri(location)
- if @uri && @limit == @max_redirects && @https_redirect_ignore_limit && same_uri_but_https?(old_uri, @uri)
+ if @uri && @limit == @max_redirects && @https_redirect_ignore_limit &&
+ same_uri_but_https?(old_uri, @uri)
@limit += 1
@https_redirect_ignore_limit = false
end
@@ -376,12 +393,18 @@ class FinalDestination
def validate_uri_format
return false unless @uri && @uri.host
- return false unless ['https', 'http'].include?(@uri.scheme)
- return false if @uri.scheme == 'http' && @uri.port != 80
- return false if @uri.scheme == 'https' && @uri.port != 443
+ return false unless %w[https http].include?(@uri.scheme)
+ return false if @uri.scheme == "http" && @uri.port != 80
+ return false if @uri.scheme == "https" && @uri.port != 443
# Disallow IP based crawling
- (IPAddr.new(@uri.hostname) rescue nil).nil?
+ (
+ begin
+ IPAddr.new(@uri.hostname)
+ rescue StandardError
+ nil
+ end
+ ).nil?
end
def hostname
@@ -392,11 +415,11 @@ class FinalDestination
url = uri(url)
if @uri&.hostname.present? && url&.hostname.present?
- hostname_parts = url.hostname.split('.')
- has_wildcard = hostname_parts.first == '*'
+ hostname_parts = url.hostname.split(".")
+ has_wildcard = hostname_parts.first == "*"
if has_wildcard
- @uri.hostname.end_with?(hostname_parts[1..-1].join('.'))
+ @uri.hostname.end_with?(hostname_parts[1..-1].join("."))
else
@uri.hostname == url.hostname
end
@@ -413,7 +436,7 @@ class FinalDestination
Rails.logger.public_send(
log_level,
- "#{RailsMultisite::ConnectionManagement.current_db}: #{message}"
+ "#{RailsMultisite::ConnectionManagement.current_db}: #{message}",
)
end
@@ -425,15 +448,12 @@ class FinalDestination
headers_subset = Struct.new(:location, :set_cookie).new
safe_session(uri) do |http|
- headers = request_headers.merge(
- 'Accept-Encoding' => 'gzip',
- 'Host' => uri.host
- )
+ headers = request_headers.merge("Accept-Encoding" => "gzip", "Host" => uri.host)
req = FinalDestination::HTTP::Get.new(uri.request_uri, headers)
http.request(req) do |resp|
- headers_subset.set_cookie = resp['Set-Cookie']
+ headers_subset.set_cookie = resp["Set-Cookie"]
if @stop_at_blocked_pages
dont_onebox = resp["Discourse-No-Onebox"] == "1"
@@ -444,7 +464,7 @@ class FinalDestination
end
if Net::HTTPRedirection === resp
- headers_subset.location = resp['location']
+ headers_subset.location = resp["location"]
result = :redirect, headers_subset
end
@@ -471,9 +491,7 @@ class FinalDestination
end
result = :ok, headers_subset
else
- catch(:done) do
- yield resp, nil, nil
- end
+ catch(:done) { yield resp, nil, nil }
end
end
end
@@ -490,7 +508,12 @@ class FinalDestination
end
def safe_session(uri)
- FinalDestination::HTTP.start(uri.host, uri.port, use_ssl: (uri.scheme == "https"), open_timeout: timeout) do |http|
+ FinalDestination::HTTP.start(
+ uri.host,
+ uri.port,
+ use_ssl: (uri.scheme == "https"),
+ open_timeout: timeout,
+ ) do |http|
http.read_timeout = timeout
yield http
end
@@ -508,14 +531,14 @@ class FinalDestination
def fetch_canonical_url(body)
return if body.blank?
- canonical_element = Nokogiri::HTML5(body).at("link[rel='canonical']")
+ canonical_element = Nokogiri.HTML5(body).at("link[rel='canonical']")
return if canonical_element.nil?
- canonical_uri = uri(canonical_element['href'])
+ canonical_uri = uri(canonical_element["href"])
return if canonical_uri.blank?
return canonical_uri if canonical_uri.host.present?
parts = [@uri.host, canonical_uri.to_s]
- complete_url = canonical_uri.to_s.starts_with?('/') ? parts.join('') : parts.join('/')
+ complete_url = canonical_uri.to_s.starts_with?("/") ? parts.join("") : parts.join("/")
complete_url = "#{@uri.scheme}://#{complete_url}" if @uri.scheme
uri(complete_url)
@@ -528,8 +551,7 @@ class FinalDestination
def same_uri_but_https?(before, after)
before = before.to_s
after = after.to_s
- before.start_with?("http://") &&
- after.start_with?("https://") &&
+ before.start_with?("http://") && after.start_with?("https://") &&
before.sub("http://", "") == after.sub("https://", "")
end
end
diff --git a/lib/final_destination/resolver.rb b/lib/final_destination/resolver.rb
index f809099d4d..843a6a313b 100644
--- a/lib/final_destination/resolver.rb
+++ b/lib/final_destination/resolver.rb
@@ -39,18 +39,19 @@ class FinalDestination::Resolver
def self.ensure_lookup_thread
return if @thread&.alive?
- @thread = Thread.new do
- while true
- @queue.deq
- @error = nil
- begin
- @result = Addrinfo.getaddrinfo(@lookup, 80, nil, :STREAM).map(&:ip_address)
- rescue => e
- @error = e
+ @thread =
+ Thread.new do
+ while true
+ @queue.deq
+ @error = nil
+ begin
+ @result = Addrinfo.getaddrinfo(@lookup, 80, nil, :STREAM).map(&:ip_address)
+ rescue => e
+ @error = e
+ end
+ @parent.wakeup
end
- @parent.wakeup
end
- end
@thread.name = "final-destination_resolver_thread"
end
end
diff --git a/lib/final_destination/ssrf_detector.rb b/lib/final_destination/ssrf_detector.rb
index aeb01d9ec9..0d06306ce8 100644
--- a/lib/final_destination/ssrf_detector.rb
+++ b/lib/final_destination/ssrf_detector.rb
@@ -2,8 +2,10 @@
class FinalDestination
module SSRFDetector
- class DisallowedIpError < SocketError; end
- class LookupFailedError < SocketError; end
+ class DisallowedIpError < SocketError
+ end
+ class LookupFailedError < SocketError
+ end
def self.standard_private_ranges
@private_ranges ||= [
diff --git a/lib/flag_settings.rb b/lib/flag_settings.rb
index da86d2c1b4..d88c01e35a 100644
--- a/lib/flag_settings.rb
+++ b/lib/flag_settings.rb
@@ -1,13 +1,12 @@
# frozen_string_literal: true
class FlagSettings
-
attr_reader(
:without_custom_types,
:notify_types,
:topic_flag_types,
:auto_action_types,
- :custom_types
+ :custom_types,
)
def initialize
@@ -39,5 +38,4 @@ class FlagSettings
def flag_types
@all_flag_types
end
-
end
diff --git a/lib/freedom_patches/better_handlebars_errors.rb b/lib/freedom_patches/better_handlebars_errors.rb
index c308f74410..a11eb27120 100644
--- a/lib/freedom_patches/better_handlebars_errors.rb
+++ b/lib/freedom_patches/better_handlebars_errors.rb
@@ -3,9 +3,8 @@
module Ember
module Handlebars
class Template
-
# Wrap in an IIFE in development mode to get the correct filename
- def compile_ember_handlebars(string, ember_template = 'Handlebars', options = nil)
+ def compile_ember_handlebars(string, ember_template = "Handlebars", options = nil)
if ::Rails.env.development?
"(function() { try { return Ember.#{ember_template}.compile(#{indent(string).inspect}); } catch(err) { throw err; } })()"
else
diff --git a/lib/freedom_patches/cose_rsapkcs1.rb b/lib/freedom_patches/cose_rsapkcs1.rb
index f6964e1835..55b639b5a5 100644
--- a/lib/freedom_patches/cose_rsapkcs1.rb
+++ b/lib/freedom_patches/cose_rsapkcs1.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
-require 'cose'
-require 'openssl/signature_algorithm/rsapkcs1'
+require "cose"
+require "openssl/signature_algorithm/rsapkcs1"
# 'cose' gem does not implement all algorithms from the Web Authentication
# (WebAuthn) standard specification. This patch implements one of the missing
@@ -38,11 +38,11 @@ module COSE
when OpenSSL::PKey::RSA
key
else
- raise(COSE::Error, 'Incompatible key for algorithm')
+ raise(COSE::Error, "Incompatible key for algorithm")
end
end
end
- register(RSAPKCS1.new(-257, 'RS256', hash_function: 'SHA256'))
+ register(RSAPKCS1.new(-257, "RS256", hash_function: "SHA256"))
end
end
diff --git a/lib/freedom_patches/fast_pluck.rb b/lib/freedom_patches/fast_pluck.rb
index ef467c5666..b33e7a2723 100644
--- a/lib/freedom_patches/fast_pluck.rb
+++ b/lib/freedom_patches/fast_pluck.rb
@@ -5,7 +5,6 @@
#
#
class ActiveRecord::Relation
-
# Note: In discourse, the following code is included in lib/sql_builder.rb
#
# class RailsDateTimeDecoder < PG::SimpleDecoder
@@ -43,7 +42,8 @@ class ActiveRecord::Relation
end
def pluck(*column_names)
- if loaded? && (column_names.map(&:to_s) - @klass.attribute_names - @klass.attribute_aliases.keys).empty?
+ if loaded? &&
+ (column_names.map(&:to_s) - @klass.attribute_names - @klass.attribute_aliases.keys).empty?
return records.pluck(*column_names)
end
@@ -55,10 +55,12 @@ class ActiveRecord::Relation
relation.select_values = column_names
- klass.connection.select_raw(relation.arel) do |result, _|
- result.type_map = DB.type_map
- result.nfields == 1 ? result.column_values(0) : result.values
- end
+ klass
+ .connection
+ .select_raw(relation.arel) do |result, _|
+ result.type_map = DB.type_map
+ result.nfields == 1 ? result.column_values(0) : result.values
+ end
end
end
end
diff --git a/lib/freedom_patches/inflector_backport.rb b/lib/freedom_patches/inflector_backport.rb
index 67547cddad..7b1d02d404 100644
--- a/lib/freedom_patches/inflector_backport.rb
+++ b/lib/freedom_patches/inflector_backport.rb
@@ -6,7 +6,6 @@
module ActiveSupport
module Inflector
-
LRU_CACHE_SIZE = 200
LRU_CACHES = []
@@ -22,26 +21,30 @@ module ActiveSupport
uncached = "#{method_name}_without_cache"
alias_method uncached, method_name
- m = define_method(method_name) do |*arguments|
- # this avoids recursive locks
- found = true
- data = cache.fetch(arguments) { found = false }
- unless found
- cache[arguments] = data = public_send(uncached, *arguments)
+ m =
+ define_method(method_name) do |*arguments|
+ # this avoids recursive locks
+ found = true
+ data = cache.fetch(arguments) { found = false }
+ cache[arguments] = data = public_send(uncached, *arguments) unless found
+ # so cache is never corrupted
+ data.dup
end
- # so cache is never corrupted
- data.dup
- end
# https://bugs.ruby-lang.org/issues/16897
- if Module.respond_to?(:ruby2_keywords, true)
- ruby2_keywords(m)
- end
+ ruby2_keywords(m) if Module.respond_to?(:ruby2_keywords, true)
end
end
- memoize :pluralize, :singularize, :camelize, :underscore, :humanize,
- :titleize, :tableize, :classify, :foreign_key
+ memoize :pluralize,
+ :singularize,
+ :camelize,
+ :underscore,
+ :humanize,
+ :titleize,
+ :tableize,
+ :classify,
+ :foreign_key
end
end
diff --git a/lib/freedom_patches/ip_addr.rb b/lib/freedom_patches/ip_addr.rb
index e9b118b8d0..9c7053d66c 100644
--- a/lib/freedom_patches/ip_addr.rb
+++ b/lib/freedom_patches/ip_addr.rb
@@ -1,29 +1,28 @@
# frozen_string_literal: true
class IPAddr
-
def self.handle_wildcards(val)
return if val.blank?
- num_wildcards = val.count('*')
+ num_wildcards = val.count("*")
return val if num_wildcards == 0
# strip ranges like "/16" from the end if present
- v = val.gsub(/\/.*/, '')
+ v = val.gsub(%r{/.*}, "")
- return if v[v.index('*')..-1] =~ /[^\.\*]/
+ return if v[v.index("*")..-1] =~ /[^\.\*]/
- parts = v.split('.')
- (4 - parts.size).times { parts << '*' } # support strings like 192.*
- v = parts.join('.')
+ parts = v.split(".")
+ (4 - parts.size).times { parts << "*" } # support strings like 192.*
+ v = parts.join(".")
- "#{v.tr('*', '0')}/#{32 - (v.count('*') * 8)}"
+ "#{v.tr("*", "0")}/#{32 - (v.count("*") * 8)}"
end
def to_cidr_s
if @addr
- mask = @mask_addr.to_s(2).count('1')
+ mask = @mask_addr.to_s(2).count("1")
if mask == 32
to_s
else
@@ -33,5 +32,4 @@ class IPAddr
nil
end
end
-
end
diff --git a/lib/freedom_patches/mail_disable_starttls.rb b/lib/freedom_patches/mail_disable_starttls.rb
index 45daba893a..a8a60bda86 100644
--- a/lib/freedom_patches/mail_disable_starttls.rb
+++ b/lib/freedom_patches/mail_disable_starttls.rb
@@ -11,9 +11,7 @@ module FreedomPatches
def build_smtp_session
super.tap do |smtp|
unless settings[:enable_starttls_auto]
- if smtp.respond_to?(:disable_starttls)
- smtp.disable_starttls
- end
+ smtp.disable_starttls if smtp.respond_to?(:disable_starttls)
end
end
end
diff --git a/lib/freedom_patches/rails4.rb b/lib/freedom_patches/rails4.rb
index 894c3dad90..806813b7c8 100644
--- a/lib/freedom_patches/rails4.rb
+++ b/lib/freedom_patches/rails4.rb
@@ -5,11 +5,13 @@
# Backporting a fix to rails itself may get too complex
module FreedomPatches
module Rails4
-
- def self.distance_of_time_in_words(from_time, to_time = 0, include_seconds = false, options = {})
- options = {
- scope: :'datetime.distance_in_words',
- }.merge!(options)
+ def self.distance_of_time_in_words(
+ from_time,
+ to_time = 0,
+ include_seconds = false,
+ options = {}
+ )
+ options = { scope: :"datetime.distance_in_words" }.merge!(options)
from_time = from_time.to_time if from_time.respond_to?(:to_time)
to_time = to_time.to_time if to_time.respond_to?(:to_time)
@@ -20,49 +22,68 @@ module FreedomPatches
I18n.with_options locale: options[:locale], scope: options[:scope] do |locale|
case distance_in_minutes
when 0..1
- return distance_in_minutes == 0 ?
- locale.t(:less_than_x_minutes, count: 1) :
- locale.t(:x_minutes, count: distance_in_minutes) unless include_seconds
+ unless include_seconds
+ return(
+ (
+ if distance_in_minutes == 0
+ locale.t(:less_than_x_minutes, count: 1)
+ else
+ locale.t(:x_minutes, count: distance_in_minutes)
+ end
+ )
+ )
+ end
- case distance_in_seconds
- when 0..4 then locale.t :less_than_x_seconds, count: 5
- when 5..9 then locale.t :less_than_x_seconds, count: 10
- when 10..19 then locale.t :less_than_x_seconds, count: 20
- when 20..39 then locale.t :half_a_minute
- when 40..59 then locale.t :less_than_x_minutes, count: 1
- else locale.t :x_minutes, count: 1
- end
-
- when 2..44 then locale.t :x_minutes, count: distance_in_minutes
- when 45..89 then locale.t :about_x_hours, count: 1
- when 90..1439 then locale.t :about_x_hours, count: (distance_in_minutes.to_f / 60.0).round
- when 1440..2519 then locale.t :x_days, count: 1
+ case distance_in_seconds
+ when 0..4
+ locale.t :less_than_x_seconds, count: 5
+ when 5..9
+ locale.t :less_than_x_seconds, count: 10
+ when 10..19
+ locale.t :less_than_x_seconds, count: 20
+ when 20..39
+ locale.t :half_a_minute
+ when 40..59
+ locale.t :less_than_x_minutes, count: 1
+ else
+ locale.t :x_minutes, count: 1
+ end
+ when 2..44
+ locale.t :x_minutes, count: distance_in_minutes
+ when 45..89
+ locale.t :about_x_hours, count: 1
+ when 90..1439
+ locale.t :about_x_hours, count: (distance_in_minutes.to_f / 60.0).round
+ when 1440..2519
+ locale.t :x_days, count: 1
# this is were we diverge from Rails
- when 2520..129599 then locale.t :x_days, count: (distance_in_minutes.to_f / 1440.0).round
- when 129600..525599 then locale.t :x_months, count: (distance_in_minutes.to_f / 43200.0).round
- else
+ when 2520..129_599
+ locale.t :x_days, count: (distance_in_minutes.to_f / 1440.0).round
+ when 129_600..525_599
+ locale.t :x_months, count: (distance_in_minutes.to_f / 43200.0).round
+ else
fyear = from_time.year
- fyear += 1 if from_time.month >= 3
- tyear = to_time.year
- tyear -= 1 if to_time.month < 3
- leap_years = (fyear > tyear) ? 0 : (fyear..tyear).count { |x| Date.leap?(x) }
- minute_offset_for_leap_year = leap_years * 1440
- # Discount the leap year days when calculating year distance.
- # e.g. if there are 20 leap year days between 2 dates having the same day
- # and month then the based on 365 days calculation
- # the distance in years will come out to over 80 years when in written
- # english it would read better as about 80 years.
- minutes_with_offset = distance_in_minutes - minute_offset_for_leap_year
- remainder = (minutes_with_offset % 525600)
- distance_in_years = (minutes_with_offset / 525600)
- if remainder < 131400
- locale.t(:about_x_years, count: distance_in_years)
- elsif remainder < 394200
- locale.t(:over_x_years, count: distance_in_years)
- else
- locale.t(:almost_x_years, count: distance_in_years + 1)
- end
+ fyear += 1 if from_time.month >= 3
+ tyear = to_time.year
+ tyear -= 1 if to_time.month < 3
+ leap_years = (fyear > tyear) ? 0 : (fyear..tyear).count { |x| Date.leap?(x) }
+ minute_offset_for_leap_year = leap_years * 1440
+ # Discount the leap year days when calculating year distance.
+ # e.g. if there are 20 leap year days between 2 dates having the same day
+ # and month then the based on 365 days calculation
+ # the distance in years will come out to over 80 years when in written
+ # english it would read better as about 80 years.
+ minutes_with_offset = distance_in_minutes - minute_offset_for_leap_year
+ remainder = (minutes_with_offset % 525_600)
+ distance_in_years = (minutes_with_offset / 525_600)
+ if remainder < 131_400
+ locale.t(:about_x_years, count: distance_in_years)
+ elsif remainder < 394_200
+ locale.t(:over_x_years, count: distance_in_years)
+ else
+ locale.t(:almost_x_years, count: distance_in_years + 1)
+ end
end
end
end
diff --git a/lib/freedom_patches/rails_multisite.rb b/lib/freedom_patches/rails_multisite.rb
index 99244669d7..0a78baaf32 100644
--- a/lib/freedom_patches/rails_multisite.rb
+++ b/lib/freedom_patches/rails_multisite.rb
@@ -19,12 +19,10 @@ module RailsMultisite
handler
end
- ActiveRecord::Base.connected_to(role: reading_role) do
- yield(db) if block_given?
- end
+ ActiveRecord::Base.connected_to(role: reading_role) { yield(db) if block_given? }
rescue => e
- STDERR.puts "URGENT: Failed to initialize site #{db}: "\
- "#{e.class} #{e.message}\n#{e.backtrace.join("\n")}"
+ STDERR.puts "URGENT: Failed to initialize site #{db}: " \
+ "#{e.class} #{e.message}\n#{e.backtrace.join("\n")}"
# the show must go on, don't stop startup if multisite fails
end
@@ -34,11 +32,7 @@ module RailsMultisite
class DiscoursePatches
def self.config
- {
- db_lookup: lambda do |env|
- env["PATH_INFO"] == "/srv/status" ? "default" : nil
- end
- }
+ { db_lookup: lambda { |env| env["PATH_INFO"] == "/srv/status" ? "default" : nil } }
end
end
end
diff --git a/lib/freedom_patches/safe_buffer.rb b/lib/freedom_patches/safe_buffer.rb
index 5bc2feb01e..efc6880b6d 100644
--- a/lib/freedom_patches/safe_buffer.rb
+++ b/lib/freedom_patches/safe_buffer.rb
@@ -12,7 +12,8 @@ module FreedomPatches
rescue Encoding::CompatibilityError
raise if raise_encoding_err
- encoding_diags = +"internal encoding #{Encoding.default_internal}, external encoding #{Encoding.default_external}"
+ encoding_diags =
+ +"internal encoding #{Encoding.default_internal}, external encoding #{Encoding.default_external}"
if encoding != Encoding::UTF_8
encoding_diags << " my encoding is #{encoding} "
force_encoding("UTF-8")
@@ -20,12 +21,16 @@ module FreedomPatches
encode!("utf-16", "utf-8", invalid: :replace)
encode!("utf-8", "utf-16")
end
- Rails.logger.warn("Encountered a non UTF-8 string in SafeBuffer - #{self} - #{encoding_diags}")
+ Rails.logger.warn(
+ "Encountered a non UTF-8 string in SafeBuffer - #{self} - #{encoding_diags}",
+ )
end
if value.encoding != Encoding::UTF_8
encoding_diags << " attempted to append encoding #{value.encoding} "
value = value.dup.force_encoding("UTF-8").scrub
- Rails.logger.warn("Attempted to concat a non UTF-8 string in SafeBuffer - #{value} - #{encoding_diags}")
+ Rails.logger.warn(
+ "Attempted to concat a non UTF-8 string in SafeBuffer - #{value} - #{encoding_diags}",
+ )
end
concat(value, _raise = true)
end
diff --git a/lib/freedom_patches/safe_migrations.rb b/lib/freedom_patches/safe_migrations.rb
index cdf989fdd5..61f9fe4754 100644
--- a/lib/freedom_patches/safe_migrations.rb
+++ b/lib/freedom_patches/safe_migrations.rb
@@ -5,7 +5,7 @@
# which rake:multisite_migrate uses
#
# The protection is only needed in Dev and Test
-if ENV['RAILS_ENV'] != "production"
- require_dependency 'migration/safe_migrate'
+if ENV["RAILS_ENV"] != "production"
+ require_dependency "migration/safe_migrate"
Migration::SafeMigrate.patch_active_record!
end
diff --git a/lib/freedom_patches/schema_migration_details.rb b/lib/freedom_patches/schema_migration_details.rb
index 6b72cc8fdc..c02d9c51ac 100644
--- a/lib/freedom_patches/schema_migration_details.rb
+++ b/lib/freedom_patches/schema_migration_details.rb
@@ -5,9 +5,7 @@ module FreedomPatches
def exec_migration(conn, direction)
rval = nil
- time = Benchmark.measure do
- rval = super
- end
+ time = Benchmark.measure { rval = super }
sql = < err
Discourse.warn_exception(
- err, message: "Unexpected error when checking SMTP credentials for group #{group.id} (#{group.name})."
+ err,
+ message:
+ "Unexpected error when checking SMTP credentials for group #{group.id} (#{group.name}).",
)
nil
end
diff --git a/lib/guardian.rb b/lib/guardian.rb
index e6a4ad76ac..1a9b2dd52f 100644
--- a/lib/guardian.rb
+++ b/lib/guardian.rb
@@ -1,14 +1,14 @@
# frozen_string_literal: true
-require 'guardian/category_guardian'
-require 'guardian/ensure_magic'
-require 'guardian/post_guardian'
-require 'guardian/bookmark_guardian'
-require 'guardian/topic_guardian'
-require 'guardian/user_guardian'
-require 'guardian/post_revision_guardian'
-require 'guardian/group_guardian'
-require 'guardian/tag_guardian'
+require "guardian/category_guardian"
+require "guardian/ensure_magic"
+require "guardian/post_guardian"
+require "guardian/bookmark_guardian"
+require "guardian/topic_guardian"
+require "guardian/user_guardian"
+require "guardian/post_revision_guardian"
+require "guardian/group_guardian"
+require "guardian/tag_guardian"
# The guardian is responsible for confirming access to various site resources and operations
class Guardian
@@ -89,7 +89,7 @@ class Guardian
def user
@user.presence
end
- alias :current_user :user
+ alias current_user user
def anonymous?
!authenticated?
@@ -127,7 +127,9 @@ class Guardian
if @category_group_moderator_groups.key?(reviewable_by_group_id)
@category_group_moderator_groups[reviewable_by_group_id]
else
- @category_group_moderator_groups[reviewable_by_group_id] = category_group_moderator_scope.exists?("categories.id": category.id)
+ @category_group_moderator_groups[
+ reviewable_by_group_id
+ ] = category_group_moderator_scope.exists?("categories.id": category.id)
end
end
@@ -136,16 +138,14 @@ class Guardian
end
def is_developer?
- @user &&
- is_admin? &&
- (
- Rails.env.development? ||
- Developer.user_ids.include?(@user.id) ||
+ @user && is_admin? &&
(
- Rails.configuration.respond_to?(:developer_emails) &&
- Rails.configuration.developer_emails.include?(@user.email)
+ Rails.env.development? || Developer.user_ids.include?(@user.id) ||
+ (
+ Rails.configuration.respond_to?(:developer_emails) &&
+ Rails.configuration.developer_emails.include?(@user.email)
+ )
)
- )
end
def is_staged?
@@ -203,12 +203,13 @@ class Guardian
end
def can_moderate?(obj)
- obj && authenticated? && !is_silenced? && (
- is_staff? ||
- (obj.is_a?(Topic) && @user.has_trust_level?(TrustLevel[4]) && can_see_topic?(obj))
- )
+ obj && authenticated? && !is_silenced? &&
+ (
+ is_staff? ||
+ (obj.is_a?(Topic) && @user.has_trust_level?(TrustLevel[4]) && can_see_topic?(obj))
+ )
end
- alias :can_see_flags? :can_moderate?
+ alias can_see_flags? can_moderate?
def can_tag?(topic)
return false if topic.blank?
@@ -229,9 +230,7 @@ class Guardian
end
def can_delete_reviewable_queued_post?(reviewable)
- reviewable.present? &&
- authenticated? &&
- reviewable.created_by_id == @user.id
+ reviewable.present? && authenticated? && reviewable.created_by_id == @user.id
end
def can_see_group?(group)
@@ -243,7 +242,9 @@ class Guardian
return true if is_admin? || group.members_visibility_level == Group.visibility_levels[:public]
return true if is_staff? && group.members_visibility_level == Group.visibility_levels[:staff]
return true if is_staff? && group.members_visibility_level == Group.visibility_levels[:members]
- return true if authenticated? && group.members_visibility_level == Group.visibility_levels[:logged_on_users]
+ if authenticated? && group.members_visibility_level == Group.visibility_levels[:logged_on_users]
+ return true
+ end
return false if user.blank?
return false unless membership = GroupUser.find_by(group_id: group.id, user_id: user.id)
@@ -257,10 +258,19 @@ class Guardian
def can_see_groups?(groups)
return false if groups.blank?
- return true if is_admin? || groups.all? { |g| g.visibility_level == Group.visibility_levels[:public] }
- return true if is_staff? && groups.all? { |g| g.visibility_level == Group.visibility_levels[:staff] }
- return true if is_staff? && groups.all? { |g| g.visibility_level == Group.visibility_levels[:members] }
- return true if authenticated? && groups.all? { |g| g.visibility_level == Group.visibility_levels[:logged_on_users] }
+ if is_admin? || groups.all? { |g| g.visibility_level == Group.visibility_levels[:public] }
+ return true
+ end
+ if is_staff? && groups.all? { |g| g.visibility_level == Group.visibility_levels[:staff] }
+ return true
+ end
+ if is_staff? && groups.all? { |g| g.visibility_level == Group.visibility_levels[:members] }
+ return true
+ end
+ if authenticated? &&
+ groups.all? { |g| g.visibility_level == Group.visibility_levels[:logged_on_users] }
+ return true
+ end
return false if user.blank?
memberships = GroupUser.where(group: groups, user_id: user.id).pluck(:owner)
@@ -277,7 +287,8 @@ class Guardian
return false if groups.blank?
requested_group_ids = groups.map(&:id) # Can't use pluck, groups could be a regular array
- matching_group_ids = Group.where(id: requested_group_ids).members_visible_groups(user).pluck(:id)
+ matching_group_ids =
+ Group.where(id: requested_group_ids).members_visible_groups(user).pluck(:id)
matching_group_ids.sort == requested_group_ids.sort
end
@@ -285,12 +296,10 @@ class Guardian
# Can we impersonate this user?
def can_impersonate?(target)
target &&
-
- # You must be an admin to impersonate
- is_admin? &&
-
- # You may not impersonate other admins unless you are a dev
- (!target.admin? || is_developer?)
+ # You must be an admin to impersonate
+ is_admin? &&
+ # You may not impersonate other admins unless you are a dev
+ (!target.admin? || is_developer?)
# Additionally, you may not impersonate yourself;
# but the two tests for different admin statuses
@@ -313,7 +322,7 @@ class Guardian
def can_suspend?(user)
user && is_staff? && user.regular?
end
- alias :can_deactivate? :can_suspend?
+ alias can_deactivate? can_suspend?
def can_revoke_admin?(admin)
can_administer_user?(admin) && admin.admin?
@@ -337,10 +346,13 @@ class Guardian
return true if title.empty? # A title set to '(none)' in the UI is an empty string
return false if user != @user
- return true if user.badges
- .where(allow_title: true)
- .pluck(:name)
- .any? { |name| Badge.display_name(name) == title }
+ if user
+ .badges
+ .where(allow_title: true)
+ .pluck(:name)
+ .any? { |name| Badge.display_name(name) == title }
+ return true
+ end
user.groups.where(title: title).exists?
end
@@ -349,13 +361,13 @@ class Guardian
return false if !user || !group_id
group = Group.find_by(id: group_id.to_i)
- user.group_ids.include?(group_id.to_i) &&
- (group ? !group.automatic : false)
+ user.group_ids.include?(group_id.to_i) && (group ? !group.automatic : false)
end
def can_use_flair_group?(user, group_id = nil)
return false if !user || !group_id || !user.group_ids.include?(group_id.to_i)
- flair_icon, flair_upload_id = Group.where(id: group_id.to_i).pluck_first(:flair_icon, :flair_upload_id)
+ flair_icon, flair_upload_id =
+ Group.where(id: group_id.to_i).pluck_first(:flair_icon, :flair_upload_id)
flair_icon.present? || flair_upload_id.present?
end
@@ -387,10 +399,9 @@ class Guardian
end
def can_invite_to_forum?(groups = nil)
- authenticated? &&
- (is_staff? || SiteSetting.max_invites_per_day.to_i.positive?) &&
- (is_staff? || @user.has_trust_level?(SiteSetting.min_trust_level_to_allow_invite.to_i)) &&
- (is_admin? || groups.blank? || groups.all? { |g| can_edit_group?(g) })
+ authenticated? && (is_staff? || SiteSetting.max_invites_per_day.to_i.positive?) &&
+ (is_staff? || @user.has_trust_level?(SiteSetting.min_trust_level_to_allow_invite.to_i)) &&
+ (is_admin? || groups.blank? || groups.all? { |g| can_edit_group?(g) })
end
def can_invite_to?(object, groups = nil)
@@ -402,9 +413,7 @@ class Guardian
if object.private_message?
return true if is_admin?
- if !@user.in_any_groups?(SiteSetting.personal_message_enabled_groups_map)
- return false
- end
+ return false if !@user.in_any_groups?(SiteSetting.personal_message_enabled_groups_map)
return false if object.reached_recipients_limit? && !is_staff?
end
@@ -441,8 +450,7 @@ class Guardian
end
def can_invite_group_to_private_message?(group, topic)
- can_see_topic?(topic) &&
- can_send_private_message?(group)
+ can_see_topic?(topic) && can_send_private_message?(group)
end
##
@@ -459,8 +467,11 @@ class Guardian
# User is authenticated
authenticated? &&
# User can send PMs, this can be covered by trust levels as well via AUTO_GROUPS
- (is_staff? || from_bot || from_system || \
- (@user.in_any_groups?(SiteSetting.personal_message_enabled_groups_map)) || notify_moderators)
+ (
+ is_staff? || from_bot || from_system ||
+ (@user.in_any_groups?(SiteSetting.personal_message_enabled_groups_map)) ||
+ notify_moderators
+ )
end
##
@@ -480,14 +491,14 @@ class Guardian
# User is authenticated and can send PMs, this can be covered by trust levels as well via AUTO_GROUPS
(can_send_private_messages?(notify_moderators: notify_moderators) || group_is_messageable) &&
- # User disabled private message
- (is_staff? || target_is_group || target.user_option.allow_private_messages) &&
- # Can't send PMs to suspended users
- (is_staff? || target_is_group || !target.suspended?) &&
- # Check group messageable level
- (from_system || target_is_user || group_is_messageable || notify_moderators) &&
- # Silenced users can only send PM to staff
- (!is_silenced? || target.staff?)
+ # User disabled private message
+ (is_staff? || target_is_group || target.user_option.allow_private_messages) &&
+ # Can't send PMs to suspended users
+ (is_staff? || target_is_group || !target.suspended?) &&
+ # Check group messageable level
+ (from_system || target_is_user || group_is_messageable || notify_moderators) &&
+ # Silenced users can only send PM to staff
+ (!is_silenced? || target.staff?)
end
def can_send_private_messages_to_email?
@@ -503,17 +514,18 @@ class Guardian
def can_export_entity?(entity)
return false if anonymous?
return true if is_admin?
- return entity != 'user_list' if is_moderator?
+ return entity != "user_list" if is_moderator?
# Regular users can only export their archives
return false unless entity == "user_archive"
- UserExport.where(user_id: @user.id, created_at: (Time.zone.now.beginning_of_day..Time.zone.now.end_of_day)).count == 0
+ UserExport.where(
+ user_id: @user.id,
+ created_at: (Time.zone.now.beginning_of_day..Time.zone.now.end_of_day),
+ ).count == 0
end
def can_mute_user?(target_user)
- can_mute_users? &&
- @user.id != target_user.id &&
- !target_user.staff?
+ can_mute_users? && @user.id != target_user.id && !target_user.staff?
end
def can_mute_users?
@@ -546,20 +558,15 @@ class Guardian
return true if theme_ids.blank?
if allowed_theme_ids = Theme.allowed_remote_theme_ids
- if (theme_ids - allowed_theme_ids).present?
- return false
- end
+ return false if (theme_ids - allowed_theme_ids).present?
end
- if include_preview && is_staff? && (theme_ids - Theme.theme_ids).blank?
- return true
- end
+ return true if include_preview && is_staff? && (theme_ids - Theme.theme_ids).blank?
parent = theme_ids.first
components = theme_ids[1..-1] || []
- Theme.user_theme_ids.include?(parent) &&
- (components - Theme.components_for(parent)).empty?
+ Theme.user_theme_ids.include?(parent) && (components - Theme.components_for(parent)).empty?
end
def can_publish_page?(topic)
@@ -608,7 +615,6 @@ class Guardian
private
def is_my_own?(obj)
-
unless anonymous?
return obj.user_id == @user.id if obj.respond_to?(:user_id) && obj.user_id && @user.id
return obj.user == @user if obj.respond_to?(:user)
@@ -650,9 +656,8 @@ class Guardian
end
def category_group_moderator_scope
- Category
- .joins("INNER JOIN group_users ON group_users.group_id = categories.reviewable_by_group_id")
- .where("group_users.user_id = ?", user.id)
+ Category.joins(
+ "INNER JOIN group_users ON group_users.group_id = categories.reviewable_by_group_id",
+ ).where("group_users.user_id = ?", user.id)
end
-
end
diff --git a/lib/guardian/category_guardian.rb b/lib/guardian/category_guardian.rb
index 35bf8d30af..e437455dfc 100644
--- a/lib/guardian/category_guardian.rb
+++ b/lib/guardian/category_guardian.rb
@@ -2,40 +2,31 @@
#mixin for all guardian methods dealing with category permissions
module CategoryGuardian
-
# Creating Method
def can_create_category?(parent = nil)
- is_admin? ||
- (
- SiteSetting.moderators_manage_categories_and_groups &&
- is_moderator?
- )
+ is_admin? || (SiteSetting.moderators_manage_categories_and_groups && is_moderator?)
end
# Editing Method
def can_edit_category?(category)
is_admin? ||
- (
- SiteSetting.moderators_manage_categories_and_groups &&
- is_moderator? &&
- can_see_category?(category)
- )
+ (
+ SiteSetting.moderators_manage_categories_and_groups && is_moderator? &&
+ can_see_category?(category)
+ )
end
def can_edit_serialized_category?(category_id:, read_restricted:)
is_admin? ||
- (
- SiteSetting.moderators_manage_categories_and_groups &&
- is_moderator? &&
- can_see_serialized_category?(category_id: category_id, read_restricted: read_restricted)
- )
+ (
+ SiteSetting.moderators_manage_categories_and_groups && is_moderator? &&
+ can_see_serialized_category?(category_id: category_id, read_restricted: read_restricted)
+ )
end
def can_delete_category?(category)
- can_edit_category?(category) &&
- category.topic_count <= 0 &&
- !category.uncategorized? &&
- !category.has_children?
+ can_edit_category?(category) && category.topic_count <= 0 && !category.uncategorized? &&
+ !category.has_children?
end
def can_see_serialized_category?(category_id:, read_restricted: true)
@@ -84,6 +75,7 @@ module CategoryGuardian
end
def topic_featured_link_allowed_category_ids
- @topic_featured_link_allowed_category_ids = Category.where(topic_featured_link_allowed: true).pluck(:id)
+ @topic_featured_link_allowed_category_ids =
+ Category.where(topic_featured_link_allowed: true).pluck(:id)
end
end
diff --git a/lib/guardian/ensure_magic.rb b/lib/guardian/ensure_magic.rb
index bff9f402dc..62cece83b6 100644
--- a/lib/guardian/ensure_magic.rb
+++ b/lib/guardian/ensure_magic.rb
@@ -2,13 +2,14 @@
# Support for ensure_{blah}! methods.
module EnsureMagic
-
def method_missing(method, *args, &block)
if method.to_s =~ /^ensure_(.*)\!$/
can_method = :"#{Regexp.last_match[1]}?"
if respond_to?(can_method)
- raise Discourse::InvalidAccess.new("#{can_method} failed") unless send(can_method, *args, &block)
+ unless send(can_method, *args, &block)
+ raise Discourse::InvalidAccess.new("#{can_method} failed")
+ end
return
end
end
@@ -20,5 +21,4 @@ module EnsureMagic
def ensure_can_see!(obj)
raise Discourse::InvalidAccess.new("Can't see #{obj}") unless can_see?(obj)
end
-
end
diff --git a/lib/guardian/group_guardian.rb b/lib/guardian/group_guardian.rb
index b3e571776c..7b153615a1 100644
--- a/lib/guardian/group_guardian.rb
+++ b/lib/guardian/group_guardian.rb
@@ -2,14 +2,9 @@
#mixin for all guardian methods dealing with group permissions
module GroupGuardian
-
# Creating Method
def can_create_group?
- is_admin? ||
- (
- SiteSetting.moderators_manage_categories_and_groups &&
- is_moderator?
- )
+ is_admin? || (SiteSetting.moderators_manage_categories_and_groups && is_moderator?)
end
# Edit authority for groups means membership changes only.
@@ -17,17 +12,15 @@ module GroupGuardian
# table and thus do not allow membership changes.
def can_edit_group?(group)
!group.automatic &&
- (can_admin_group?(group) || group.users.where('group_users.owner').include?(user))
+ (can_admin_group?(group) || group.users.where("group_users.owner").include?(user))
end
def can_admin_group?(group)
is_admin? ||
- (
- SiteSetting.moderators_manage_categories_and_groups &&
- is_moderator? &&
- can_see?(group) &&
- group.id != Group::AUTO_GROUPS[:admins]
- )
+ (
+ SiteSetting.moderators_manage_categories_and_groups && is_moderator? && can_see?(group) &&
+ group.id != Group::AUTO_GROUPS[:admins]
+ )
end
def can_see_group_messages?(group)
diff --git a/lib/guardian/post_guardian.rb b/lib/guardian/post_guardian.rb
index 49205fcc16..6ca1256896 100644
--- a/lib/guardian/post_guardian.rb
+++ b/lib/guardian/post_guardian.rb
@@ -2,26 +2,24 @@
# mixin for all guardian methods dealing with post permissions
module PostGuardian
-
def unrestricted_link_posting?
authenticated? && @user.has_trust_level?(TrustLevel[SiteSetting.min_trust_to_post_links])
end
def link_posting_access
if unrestricted_link_posting?
- 'full'
+ "full"
elsif SiteSetting.allowed_link_domains.present?
- 'limited'
+ "limited"
else
- 'none'
+ "none"
end
end
def can_post_link?(host: nil)
return false if host.blank?
- unrestricted_link_posting? ||
- SiteSetting.allowed_link_domains.split('|').include?(host)
+ unrestricted_link_posting? || SiteSetting.allowed_link_domains.split("|").include?(host)
end
# Can the user act on the post in a particular way.
@@ -30,47 +28,55 @@ module PostGuardian
return false unless (can_see_post.nil? && can_see_post?(post)) || can_see_post
# no warnings except for staff
- return false if action_key == :notify_user && (post.user.blank? || (!is_staff? && opts[:is_warning].present? && opts[:is_warning] == 'true'))
+ if action_key == :notify_user &&
+ (
+ post.user.blank? ||
+ (!is_staff? && opts[:is_warning].present? && opts[:is_warning] == "true")
+ )
+ return false
+ end
taken = opts[:taken_actions].try(:keys).to_a
- is_flag = PostActionType.notify_flag_types[action_key] || PostActionType.custom_types[action_key]
+ is_flag =
+ PostActionType.notify_flag_types[action_key] || PostActionType.custom_types[action_key]
already_taken_this_action = taken.any? && taken.include?(PostActionType.types[action_key])
- already_did_flagging = taken.any? && (taken & PostActionType.notify_flag_types.values).any?
+ already_did_flagging = taken.any? && (taken & PostActionType.notify_flag_types.values).any?
- result = if authenticated? && post && !@user.anonymous?
+ result =
+ if authenticated? && post && !@user.anonymous?
+ # Silenced users can't flag
+ return false if is_flag && @user.silenced?
- # Silenced users can't flag
- return false if is_flag && @user.silenced?
+ # Hidden posts can't be flagged
+ return false if is_flag && post.hidden?
- # Hidden posts can't be flagged
- return false if is_flag && post.hidden?
+ # post made by staff, but we don't allow staff flags
+ return false if is_flag && (!SiteSetting.allow_flagging_staff?) && post&.user&.staff?
- # post made by staff, but we don't allow staff flags
- return false if is_flag &&
- (!SiteSetting.allow_flagging_staff?) &&
- post&.user&.staff?
+ if action_key == :notify_user &&
+ !@user.in_any_groups?(SiteSetting.personal_message_enabled_groups_map)
+ return false
+ end
- if action_key == :notify_user && !@user.in_any_groups?(SiteSetting.personal_message_enabled_groups_map)
- return false
+ # we allow flagging for trust level 1 and higher
+ # always allowed for private messages
+ (
+ is_flag && not(already_did_flagging) &&
+ (
+ @user.has_trust_level?(TrustLevel[SiteSetting.min_trust_to_flag_posts]) ||
+ post.topic.private_message?
+ )
+ ) ||
+ # not a flagging action, and haven't done it already
+ not(is_flag || already_taken_this_action) &&
+ # nothing except flagging on archived topics
+ not(post.topic&.archived?) &&
+ # nothing except flagging on deleted posts
+ not(post.trashed?) &&
+ # don't like your own stuff
+ not(action_key == :like && (post.user.blank? || is_my_own?(post)))
end
- # we allow flagging for trust level 1 and higher
- # always allowed for private messages
- (is_flag && not(already_did_flagging) && (@user.has_trust_level?(TrustLevel[SiteSetting.min_trust_to_flag_posts]) || post.topic.private_message?)) ||
-
- # not a flagging action, and haven't done it already
- not(is_flag || already_taken_this_action) &&
-
- # nothing except flagging on archived topics
- not(post.topic&.archived?) &&
-
- # nothing except flagging on deleted posts
- not(post.trashed?) &&
-
- # don't like your own stuff
- not(action_key == :like && (post.user.blank? || is_my_own?(post)))
- end
-
!!result
end
@@ -94,12 +100,16 @@ module PostGuardian
end
def can_delete_all_posts?(user)
- is_staff? &&
- user &&
- !user.admin? &&
- (is_admin? ||
- ((user.first_post_created_at.nil? || user.first_post_created_at >= SiteSetting.delete_user_max_post_age.days.ago) &&
- user.post_count <= SiteSetting.delete_all_posts_max.to_i))
+ is_staff? && user && !user.admin? &&
+ (
+ is_admin? ||
+ (
+ (
+ user.first_post_created_at.nil? ||
+ user.first_post_created_at >= SiteSetting.delete_user_max_post_age.days.ago
+ ) && user.post_count <= SiteSetting.delete_all_posts_max.to_i
+ )
+ )
end
def can_create_post?(topic)
@@ -108,53 +118,43 @@ module PostGuardian
key = topic_memoize_key(topic)
@can_create_post ||= {}
- @can_create_post.fetch(key) do
- @can_create_post[key] = can_create_post_in_topic?(topic)
- end
+ @can_create_post.fetch(key) { @can_create_post[key] = can_create_post_in_topic?(topic) }
end
def can_edit_post?(post)
- if Discourse.static_doc_topic_ids.include?(post.topic_id) && !is_admin?
- return false
- end
+ return false if Discourse.static_doc_topic_ids.include?(post.topic_id) && !is_admin?
return true if is_admin?
# Must be staff to edit a locked post
return false if post.locked? && !is_staff?
- return can_create_post?(post.topic) if (
- is_staff? ||
- (
- SiteSetting.trusted_users_can_edit_others? &&
- @user.has_trust_level?(TrustLevel[4])
- ) ||
- is_category_group_moderator?(post.topic&.category)
- )
-
- if post.topic&.archived? || post.user_deleted || post.deleted_at
- return false
+ if (
+ is_staff? ||
+ (SiteSetting.trusted_users_can_edit_others? && @user.has_trust_level?(TrustLevel[4])) ||
+ is_category_group_moderator?(post.topic&.category)
+ )
+ return can_create_post?(post.topic)
end
+ return false if post.topic&.archived? || post.user_deleted || post.deleted_at
+
# Editing a shared draft.
- return true if (
- can_see_post?(post) &&
- can_create_post?(post.topic) &&
- post.topic.category_id == SiteSetting.shared_drafts_category.to_i &&
- can_see_category?(post.topic.category) &&
- can_see_shared_draft?
- )
+ if (
+ can_see_post?(post) && can_create_post?(post.topic) &&
+ post.topic.category_id == SiteSetting.shared_drafts_category.to_i &&
+ can_see_category?(post.topic.category) && can_see_shared_draft?
+ )
+ return true
+ end
if post.wiki && (@user.trust_level >= SiteSetting.min_trust_to_edit_wiki_post.to_i)
return can_create_post?(post.topic)
end
- if @user.trust_level < SiteSetting.min_trust_to_edit_post
- return false
- end
+ return false if @user.trust_level < SiteSetting.min_trust_to_edit_post
if is_my_own?(post)
-
return false if @user.silenced?
return can_edit_hidden_post?(post) if post.hidden?
@@ -175,7 +175,8 @@ module PostGuardian
def can_edit_hidden_post?(post)
return false if post.nil?
- post.hidden_at.nil? || post.hidden_at < SiteSetting.cooldown_minutes_after_hiding_posts.minutes.ago
+ post.hidden_at.nil? ||
+ post.hidden_at < SiteSetting.cooldown_minutes_after_hiding_posts.minutes.ago
end
def can_delete_post_or_topic?(post)
@@ -195,7 +196,12 @@ module PostGuardian
# You can delete your own posts
if is_my_own?(post)
- return false if (SiteSetting.max_post_deletions_per_minute < 1 || SiteSetting.max_post_deletions_per_day < 1)
+ if (
+ SiteSetting.max_post_deletions_per_minute < 1 ||
+ SiteSetting.max_post_deletions_per_day < 1
+ )
+ return false
+ end
return true if !post.user_deleted?
end
@@ -208,7 +214,9 @@ module PostGuardian
return false if post.is_first_post?
return false if !is_admin? || !can_edit_post?(post)
return false if !post.deleted_at
- return false if post.deleted_by_id == @user.id && post.deleted_at >= Post::PERMANENT_DELETE_TIMER.ago
+ if post.deleted_by_id == @user.id && post.deleted_at >= Post::PERMANENT_DELETE_TIMER.ago
+ return false
+ end
true
end
@@ -220,7 +228,12 @@ module PostGuardian
return true if can_moderate_topic?(topic) && !!post.deleted_at
if is_my_own?(post)
- return false if (SiteSetting.max_post_deletions_per_minute < 1 || SiteSetting.max_post_deletions_per_day < 1)
+ if (
+ SiteSetting.max_post_deletions_per_minute < 1 ||
+ SiteSetting.max_post_deletions_per_day < 1
+ )
+ return false
+ end
return true if post.user_deleted && !post.deleted_at
end
@@ -230,19 +243,29 @@ module PostGuardian
def can_delete_post_action?(post_action)
return false unless is_my_own?(post_action) && !post_action.is_private_message?
- post_action.created_at > SiteSetting.post_undo_action_window_mins.minutes.ago && !post_action.post&.topic&.archived?
+ post_action.created_at > SiteSetting.post_undo_action_window_mins.minutes.ago &&
+ !post_action.post&.topic&.archived?
end
def can_see_post?(post)
return false if post.blank?
return true if is_admin?
return false unless can_see_post_topic?(post)
- return false unless post.user == @user || Topic.visible_post_types(@user).include?(post.post_type)
+ unless post.user == @user || Topic.visible_post_types(@user).include?(post.post_type)
+ return false
+ end
return true if is_moderator? || is_category_group_moderator?(post.topic.category)
- return true if post.deleted_at.blank? || (post.deleted_by_id == @user.id && @user.has_trust_level?(TrustLevel[4]))
+ return true if !post.trashed? || can_see_deleted_post?(post)
false
end
+ def can_see_deleted_post?(post)
+ return false if !post.trashed?
+ return false if @user.anonymous?
+ return true if is_staff?
+ post.deleted_by_id == @user.id && @user.has_trust_level?(TrustLevel[4])
+ end
+
def can_view_edit_history?(post)
return false unless post
@@ -250,9 +273,7 @@ module PostGuardian
return true if post.wiki || SiteSetting.edit_history_visible_to_public
end
- authenticated? &&
- (is_staff? || @user.id == post.user_id) &&
- can_see_post?(post)
+ authenticated? && (is_staff? || @user.id == post.user_id) && can_see_post?(post)
end
def can_change_post_owner?
@@ -308,13 +329,18 @@ module PostGuardian
private
def can_create_post_in_topic?(topic)
- return false if !SiteSetting.enable_system_message_replies? && topic.try(:subtype) == "system_message"
+ if !SiteSetting.enable_system_message_replies? && topic.try(:subtype) == "system_message"
+ return false
+ end
- (!SpamRule::AutoSilence.prevent_posting?(@user) || (!!topic.try(:private_message?) && topic.allowed_users.include?(@user))) && (
- !topic ||
- !topic.category ||
- Category.post_create_allowed(self).where(id: topic.category.id).count == 1
- )
+ (
+ !SpamRule::AutoSilence.prevent_posting?(@user) ||
+ (!!topic.try(:private_message?) && topic.allowed_users.include?(@user))
+ ) &&
+ (
+ !topic || !topic.category ||
+ Category.post_create_allowed(self).where(id: topic.category.id).count == 1
+ )
end
def topic_memoize_key(topic)
@@ -329,8 +355,6 @@ module PostGuardian
key = topic_memoize_key(topic)
@can_see_post_topic ||= {}
- @can_see_post_topic.fetch(key) do
- @can_see_post_topic[key] = can_see_topic?(topic)
- end
+ @can_see_post_topic.fetch(key) { @can_see_post_topic[key] = can_see_topic?(topic) }
end
end
diff --git a/lib/guardian/post_revision_guardian.rb b/lib/guardian/post_revision_guardian.rb
index 4372728b95..1e61b19e74 100644
--- a/lib/guardian/post_revision_guardian.rb
+++ b/lib/guardian/post_revision_guardian.rb
@@ -2,7 +2,6 @@
# mixin for all Guardian methods dealing with post_revisions permissions
module PostRevisionGuardian
-
def can_see_post_revision?(post_revision)
return false unless post_revision
return false if post_revision.hidden && !can_view_hidden_post_revisions?
@@ -21,5 +20,4 @@ module PostRevisionGuardian
def can_view_hidden_post_revisions?
is_staff?
end
-
end
diff --git a/lib/guardian/tag_guardian.rb b/lib/guardian/tag_guardian.rb
index 5a4be92ab4..db1ec7688c 100644
--- a/lib/guardian/tag_guardian.rb
+++ b/lib/guardian/tag_guardian.rb
@@ -3,11 +3,13 @@
#mixin for all guardian methods dealing with tagging permissions
module TagGuardian
def can_create_tag?
- SiteSetting.tagging_enabled && @user.has_trust_level_or_staff?(SiteSetting.min_trust_to_create_tag)
+ SiteSetting.tagging_enabled &&
+ @user.has_trust_level_or_staff?(SiteSetting.min_trust_to_create_tag)
end
def can_tag_topics?
- SiteSetting.tagging_enabled && @user.has_trust_level_or_staff?(SiteSetting.min_trust_level_to_tag_topics)
+ SiteSetting.tagging_enabled &&
+ @user.has_trust_level_or_staff?(SiteSetting.min_trust_level_to_tag_topics)
end
def can_tag_pms?
@@ -16,7 +18,8 @@ module TagGuardian
return true if @user == Discourse.system_user
group_ids = SiteSetting.pm_tags_allowed_for_groups_map
- group_ids.include?(Group::AUTO_GROUPS[:everyone]) || @user.group_users.exists?(group_id: group_ids)
+ group_ids.include?(Group::AUTO_GROUPS[:everyone]) ||
+ @user.group_users.exists?(group_id: group_ids)
end
def can_admin_tags?
@@ -28,12 +31,13 @@ module TagGuardian
end
def hidden_tag_names
- @hidden_tag_names ||= begin
- if SiteSetting.tagging_enabled && !is_staff?
- DiscourseTagging.hidden_tag_names(self)
- else
- []
+ @hidden_tag_names ||=
+ begin
+ if SiteSetting.tagging_enabled && !is_staff?
+ DiscourseTagging.hidden_tag_names(self)
+ else
+ []
+ end
end
- end
end
end
diff --git a/lib/guardian/topic_guardian.rb b/lib/guardian/topic_guardian.rb
index c487de6897..05fb6ad041 100644
--- a/lib/guardian/topic_guardian.rb
+++ b/lib/guardian/topic_guardian.rb
@@ -3,13 +3,11 @@
#mixin for all guardian methods dealing with topic permissions
module TopicGuardian
def can_remove_allowed_users?(topic, target_user = nil)
- is_staff? ||
- (topic.user == @user && @user.has_trust_level?(TrustLevel[2])) ||
- (
- topic.allowed_users.count > 1 &&
- topic.user != target_user &&
- !!(target_user && user == target_user)
- )
+ is_staff? || (topic.user == @user && @user.has_trust_level?(TrustLevel[2])) ||
+ (
+ topic.allowed_users.count > 1 && topic.user != target_user &&
+ !!(target_user && user == target_user)
+ )
end
def can_review_topic?(topic)
@@ -49,10 +47,10 @@ module TopicGuardian
# Creating Methods
def can_create_topic?(parent)
is_staff? ||
- (user &&
- user.trust_level >= SiteSetting.min_trust_to_create_topic.to_i &&
- can_create_post?(parent) &&
- Category.topic_create_allowed(self).limit(1).count == 1)
+ (
+ user && user.trust_level >= SiteSetting.min_trust_to_create_topic.to_i &&
+ can_create_post?(parent) && Category.topic_create_allowed(self).limit(1).count == 1
+ )
end
def can_create_topic_on_category?(category)
@@ -60,11 +58,18 @@ module TopicGuardian
category_id = Category === category ? category.id : category
can_create_topic?(nil) &&
- (!category || Category.topic_create_allowed(self).where(id: category_id).count == 1)
+ (!category || Category.topic_create_allowed(self).where(id: category_id).count == 1)
end
def can_move_topic_to_category?(category)
- category = Category === category ? category : Category.find(category || SiteSetting.uncategorized_category_id)
+ category =
+ (
+ if Category === category
+ category
+ else
+ Category.find(category || SiteSetting.uncategorized_category_id)
+ end
+ )
is_staff? || (can_create_topic_on_category?(category) && !category.require_topic_approval?)
end
@@ -75,7 +80,9 @@ module TopicGuardian
return false if topic.trashed?
return true if is_admin?
- trusted = (authenticated? && user.has_trust_level?(TrustLevel[4])) || is_moderator? || can_perform_action_available_to_group_moderators?(topic)
+ trusted =
+ (authenticated? && user.has_trust_level?(TrustLevel[4])) || is_moderator? ||
+ can_perform_action_available_to_group_moderators?(topic)
(!(topic.closed? || topic.archived?) || trusted) && can_create_post?(topic)
end
@@ -97,45 +104,40 @@ module TopicGuardian
# except for a tiny edge case where the topic is uncategorized and you are trying
# to fix it but uncategorized is disabled
if (
- SiteSetting.allow_uncategorized_topics ||
- topic.category_id != SiteSetting.uncategorized_category_id
- )
+ SiteSetting.allow_uncategorized_topics ||
+ topic.category_id != SiteSetting.uncategorized_category_id
+ )
return false if !can_create_topic_on_category?(topic.category)
end
# Editing a shared draft.
- return true if (
- !topic.archived &&
- !topic.private_message? &&
- topic.category_id == SiteSetting.shared_drafts_category.to_i &&
- can_see_category?(topic.category) &&
- can_see_shared_draft? &&
- can_create_post?(topic)
- )
+ if (
+ !topic.archived && !topic.private_message? &&
+ topic.category_id == SiteSetting.shared_drafts_category.to_i &&
+ can_see_category?(topic.category) && can_see_shared_draft? && can_create_post?(topic)
+ )
+ return true
+ end
# TL4 users can edit archived topics, but can not edit private messages
- return true if (
- SiteSetting.trusted_users_can_edit_others? &&
- topic.archived &&
- !topic.private_message? &&
- user.has_trust_level?(TrustLevel[4]) &&
- can_create_post?(topic)
- )
+ if (
+ SiteSetting.trusted_users_can_edit_others? && topic.archived && !topic.private_message? &&
+ user.has_trust_level?(TrustLevel[4]) && can_create_post?(topic)
+ )
+ return true
+ end
# TL3 users can not edit archived topics and private messages
- return true if (
- SiteSetting.trusted_users_can_edit_others? &&
- !topic.archived &&
- !topic.private_message? &&
- user.has_trust_level?(TrustLevel[3]) &&
- can_create_post?(topic)
- )
+ if (
+ SiteSetting.trusted_users_can_edit_others? && !topic.archived && !topic.private_message? &&
+ user.has_trust_level?(TrustLevel[3]) && can_create_post?(topic)
+ )
+ return true
+ end
return false if topic.archived
- is_my_own?(topic) &&
- !topic.edit_time_limit_expired?(user) &&
- !first_post&.locked? &&
+ is_my_own?(topic) && !topic.edit_time_limit_expired?(user) && !first_post&.locked? &&
(!first_post&.hidden? || can_edit_hidden_post?(first_post))
end
@@ -149,9 +151,13 @@ module TopicGuardian
def can_delete_topic?(topic)
!topic.trashed? &&
- (is_staff? || (is_my_own?(topic) && topic.posts_count <= 1 && topic.created_at && topic.created_at > 24.hours.ago) || is_category_group_moderator?(topic.category)) &&
- !topic.is_category_topic? &&
- !Discourse.static_doc_topic_ids.include?(topic.id)
+ (
+ is_staff? ||
+ (
+ is_my_own?(topic) && topic.posts_count <= 1 && topic.created_at &&
+ topic.created_at > 24.hours.ago
+ ) || is_category_group_moderator?(topic.category)
+ ) && !topic.is_category_topic? && !Discourse.static_doc_topic_ids.include?(topic.id)
end
def can_permanently_delete_topic?(topic)
@@ -165,15 +171,21 @@ module TopicGuardian
# All other posts that were deleted still must be permanently deleted
# before the topic can be deleted with the exception of small action
# posts that will be deleted right before the topic is.
- all_posts_count = Post.with_deleted
- .where(topic_id: topic.id)
- .where(post_type: [Post.types[:regular], Post.types[:moderator_action], Post.types[:whisper]])
- .count
+ all_posts_count =
+ Post
+ .with_deleted
+ .where(topic_id: topic.id)
+ .where(
+ post_type: [Post.types[:regular], Post.types[:moderator_action], Post.types[:whisper]],
+ )
+ .count
return false if all_posts_count > 1
return false if !is_admin? || !can_see_topic?(topic)
return false if !topic.deleted_at
- return false if topic.deleted_by_id == @user.id && topic.deleted_at >= Post::PERMANENT_DELETE_TIMER.ago
+ if topic.deleted_by_id == @user.id && topic.deleted_at >= Post::PERMANENT_DELETE_TIMER.ago
+ return false
+ end
true
end
@@ -181,7 +193,7 @@ module TopicGuardian
can_moderate?(topic) || can_perform_action_available_to_group_moderators?(topic)
end
- alias :can_create_unlisted_topic? :can_toggle_topic_visibility?
+ alias can_create_unlisted_topic? can_toggle_topic_visibility?
def can_convert_topic?(topic)
return false unless @user.in_any_groups?(SiteSetting.personal_message_enabled_groups_map)
@@ -228,13 +240,16 @@ module TopicGuardian
# Filter out topics with shared drafts if user cannot see shared drafts
if !can_see_shared_draft?
- default_scope = default_scope.left_outer_joins(:shared_draft).where("shared_drafts.id IS NULL")
+ default_scope =
+ default_scope.left_outer_joins(:shared_draft).where("shared_drafts.id IS NULL")
end
all_topics_scope =
if authenticated?
Topic.unscoped.merge(
- secured_regular_topic_scope(default_scope, topic_ids: topic_ids).or(private_message_topic_scope(default_scope))
+ secured_regular_topic_scope(default_scope, topic_ids: topic_ids).or(
+ private_message_topic_scope(default_scope),
+ ),
)
else
Topic.unscoped.merge(secured_regular_topic_scope(default_scope, topic_ids: topic_ids))
@@ -256,7 +271,10 @@ module TopicGuardian
category = topic.category
can_see_category?(category) &&
- (!category.read_restricted || !is_staged? || secure_category_ids.include?(category.id) || topic.user == user)
+ (
+ !category.read_restricted || !is_staged? || secure_category_ids.include?(category.id) ||
+ topic.user == user
+ )
end
def can_get_access_to_topic?(topic)
@@ -266,9 +284,17 @@ module TopicGuardian
def filter_allowed_categories(records)
return records if is_admin? && !SiteSetting.suppress_secured_categories_from_admin
- records = allowed_category_ids.size == 0 ?
- records.where('topics.category_id IS NULL') :
- records.where('topics.category_id IS NULL or topics.category_id IN (?)', allowed_category_ids)
+ records =
+ (
+ if allowed_category_ids.size == 0
+ records.where("topics.category_id IS NULL")
+ else
+ records.where(
+ "topics.category_id IS NULL or topics.category_id IN (?)",
+ allowed_category_ids,
+ )
+ end
+ )
records.references(:categories)
end
@@ -276,7 +302,10 @@ module TopicGuardian
def can_edit_featured_link?(category_id)
return false unless SiteSetting.topic_featured_link_enabled
return false unless @user.trust_level >= TrustLevel.levels[:basic]
- Category.where(id: category_id || SiteSetting.uncategorized_category_id, topic_featured_link_allowed: true).exists?
+ Category.where(
+ id: category_id || SiteSetting.uncategorized_category_id,
+ topic_featured_link_allowed: true,
+ ).exists?
end
def can_update_bumped_at?
@@ -292,7 +321,8 @@ module TopicGuardian
return false if topic.private_message? && !can_tag_pms?
return true if can_edit_topic?(topic)
- if topic&.first_post&.wiki && (@user.trust_level >= SiteSetting.min_trust_to_edit_wiki_post.to_i)
+ if topic&.first_post&.wiki &&
+ (@user.trust_level >= SiteSetting.min_trust_to_edit_wiki_post.to_i)
return can_create_post?(topic)
end
@@ -306,12 +336,12 @@ module TopicGuardian
is_category_group_moderator?(topic.category)
end
- alias :can_archive_topic? :can_perform_action_available_to_group_moderators?
- alias :can_close_topic? :can_perform_action_available_to_group_moderators?
- alias :can_open_topic? :can_perform_action_available_to_group_moderators?
- alias :can_split_merge_topic? :can_perform_action_available_to_group_moderators?
- alias :can_edit_staff_notes? :can_perform_action_available_to_group_moderators?
- alias :can_pin_unpin_topic? :can_perform_action_available_to_group_moderators?
+ alias can_archive_topic? can_perform_action_available_to_group_moderators?
+ alias can_close_topic? can_perform_action_available_to_group_moderators?
+ alias can_open_topic? can_perform_action_available_to_group_moderators?
+ alias can_split_merge_topic? can_perform_action_available_to_group_moderators?
+ alias can_edit_staff_notes? can_perform_action_available_to_group_moderators?
+ alias can_pin_unpin_topic? can_perform_action_available_to_group_moderators?
def can_move_posts?(topic)
return false if is_silenced?
@@ -327,12 +357,10 @@ module TopicGuardian
def private_message_topic_scope(scope)
pm_scope = scope.private_messages_for_user(user)
- if is_moderator?
- pm_scope = pm_scope.or(scope.where(<<~SQL))
+ pm_scope = pm_scope.or(scope.where(<<~SQL)) if is_moderator?
topics.subtype = '#{TopicSubtype.moderator_warning}'
OR topics.id IN (#{Topic.has_flag_scope.select(:topic_id).to_sql})
SQL
- end
pm_scope
end
@@ -357,7 +385,8 @@ module TopicGuardian
)
SQL
- secured_scope = secured_scope.or(Topic.unscoped.where(sql, user_id: user.id, topic_ids: topic_ids))
+ secured_scope =
+ secured_scope.or(Topic.unscoped.where(sql, user_id: user.id, topic_ids: topic_ids))
end
scope.listable_topics.merge(secured_scope)
diff --git a/lib/guardian/user_guardian.rb b/lib/guardian/user_guardian.rb
index 675e3431ef..2879ad036f 100644
--- a/lib/guardian/user_guardian.rb
+++ b/lib/guardian/user_guardian.rb
@@ -2,9 +2,8 @@
# mixin for all Guardian methods dealing with user permissions
module UserGuardian
-
def can_claim_reviewable_topic?(topic)
- SiteSetting.reviewable_claiming != 'disabled' && can_review_topic?(topic)
+ SiteSetting.reviewable_claiming != "disabled" && can_review_topic?(topic)
end
def can_pick_avatar?(user_avatar, upload)
@@ -63,13 +62,14 @@ module UserGuardian
if is_me?(user)
!SiteSetting.enable_discourse_connect &&
- !user.has_more_posts_than?(SiteSetting.delete_user_self_max_post_count)
+ !user.has_more_posts_than?(SiteSetting.delete_user_self_max_post_count)
else
- is_staff? && (
- user.first_post_created_at.nil? ||
- !user.has_more_posts_than?(User::MAX_STAFF_DELETE_POST_COUNT) ||
- user.first_post_created_at > SiteSetting.delete_user_max_post_age.to_i.days.ago
- )
+ is_staff? &&
+ (
+ user.first_post_created_at.nil? ||
+ !user.has_more_posts_than?(User::MAX_STAFF_DELETE_POST_COUNT) ||
+ user.first_post_created_at > SiteSetting.delete_user_max_post_age.to_i.days.ago
+ )
end
end
@@ -123,9 +123,7 @@ module UserGuardian
return true if !SiteSetting.allow_users_to_hide_profile?
# If a user has hidden their profile, restrict it to them and staff
- if user.user_option.try(:hide_profile_and_presence?)
- return is_me?(user) || is_staff?
- end
+ return is_me?(user) || is_staff? if user.user_option.try(:hide_profile_and_presence?)
true
end
@@ -141,14 +139,13 @@ module UserGuardian
is_staff_or_is_me = is_staff? || is_me?(user)
cache_key = is_staff_or_is_me ? :staff_or_me : :other
- @allowed_user_field_ids[cache_key] ||=
- begin
- if is_staff_or_is_me
- UserField.pluck(:id)
- else
- UserField.where("show_on_profile OR show_on_user_card").pluck(:id)
- end
+ @allowed_user_field_ids[cache_key] ||= begin
+ if is_staff_or_is_me
+ UserField.pluck(:id)
+ else
+ UserField.where("show_on_profile OR show_on_user_card").pluck(:id)
end
+ end
end
def can_feature_topic?(user, topic)
@@ -161,13 +158,14 @@ module UserGuardian
end
def can_see_review_queue?
- is_staff? || (
- SiteSetting.enable_category_group_moderation &&
- Reviewable
- .where(reviewable_by_group_id: @user.group_users.pluck(:group_id))
- .where('category_id IS NULL or category_id IN (?)', allowed_category_ids)
- .exists?
- )
+ is_staff? ||
+ (
+ SiteSetting.enable_category_group_moderation &&
+ Reviewable
+ .where(reviewable_by_group_id: @user.group_users.pluck(:group_id))
+ .where("category_id IS NULL or category_id IN (?)", allowed_category_ids)
+ .exists?
+ )
end
def can_see_summary_stats?(target_user)
@@ -175,11 +173,17 @@ module UserGuardian
end
def can_upload_profile_header?(user)
- (is_me?(user) && user.has_trust_level?(SiteSetting.min_trust_level_to_allow_profile_background.to_i)) || is_staff?
+ (
+ is_me?(user) &&
+ user.has_trust_level?(SiteSetting.min_trust_level_to_allow_profile_background.to_i)
+ ) || is_staff?
end
def can_upload_user_card_background?(user)
- (is_me?(user) && user.has_trust_level?(SiteSetting.min_trust_level_to_allow_user_card_background.to_i)) || is_staff?
+ (
+ is_me?(user) &&
+ user.has_trust_level?(SiteSetting.min_trust_level_to_allow_user_card_background.to_i)
+ ) || is_staff?
end
def can_upload_external?
diff --git a/lib/has_errors.rb b/lib/has_errors.rb
index 907e537d1c..daa8bb8e8d 100644
--- a/lib/has_errors.rb
+++ b/lib/has_errors.rb
@@ -33,11 +33,8 @@ module HasErrors
def add_errors_from(obj)
return if obj.blank?
- if obj.is_a?(StandardError)
- return add_error(obj.message)
- end
+ return add_error(obj.message) if obj.is_a?(StandardError)
obj.errors.full_messages.each { |msg| add_error(msg) }
end
-
end
diff --git a/lib/highlight_js.rb b/lib/highlight_js.rb
index 8ad4f26264..48bd77baf3 100644
--- a/lib/highlight_js.rb
+++ b/lib/highlight_js.rb
@@ -2,12 +2,47 @@
module HighlightJs
HIGHLIGHTJS_DIR ||= "#{Rails.root}/vendor/assets/javascripts/highlightjs/"
- BUNDLED_LANGS = %w(bash c cpp csharp css diff go graphql ini java javascript json kotlin less lua makefile xml markdown objectivec perl php php-template plaintext python python-repl r ruby rust scss shell sql swift typescript vbnet wasm yaml)
+ BUNDLED_LANGS = %w[
+ bash
+ c
+ cpp
+ csharp
+ css
+ diff
+ go
+ graphql
+ ini
+ java
+ javascript
+ json
+ kotlin
+ less
+ lua
+ makefile
+ xml
+ markdown
+ objectivec
+ perl
+ php
+ php-template
+ plaintext
+ python
+ python-repl
+ r
+ ruby
+ rust
+ scss
+ shell
+ sql
+ swift
+ typescript
+ vbnet
+ wasm
+ yaml
+ ]
def self.languages
- langs = Dir.glob(HIGHLIGHTJS_DIR + "languages/*.js").map do |path|
- File.basename(path)[0..-8]
- end
+ langs = Dir.glob(HIGHLIGHTJS_DIR + "languages/*.js").map { |path| File.basename(path)[0..-8] }
langs.sort
end
@@ -26,8 +61,9 @@ module HighlightJs
end
def self.version(lang_string)
- (@lang_string_cache ||= {})[lang_string] ||=
- Digest::SHA1.hexdigest(bundle lang_string.split("|"))
+ (@lang_string_cache ||= {})[lang_string] ||= Digest::SHA1.hexdigest(
+ bundle lang_string.split("|")
+ )
end
def self.path
diff --git a/lib/hijack.rb b/lib/hijack.rb
index 0b39abb710..eb9b5ce216 100644
--- a/lib/hijack.rb
+++ b/lib/hijack.rb
@@ -1,19 +1,17 @@
# frozen_string_literal: true
-require 'method_profiler'
+require "method_profiler"
# This module allows us to hijack a request and send it to the client in the deferred job queue
# For cases where we are making remote calls like onebox or proxying files and so on this helps
# free up a unicorn worker while the remote IO is happening
module Hijack
-
def hijack(info: nil, &blk)
controller_class = self.class
- if hijack = request.env['rack.hijack']
-
- request.env['discourse.request_tracker.skip'] = true
- request_tracker = request.env['discourse.request_tracker']
+ if hijack = request.env["rack.hijack"]
+ request.env["discourse.request_tracker.skip"] = true
+ request_tracker = request.env["discourse.request_tracker"]
# in the past unicorn would recycle env, this is not longer the case
env = request.env
@@ -32,7 +30,6 @@ module Hijack
original_headers = response.headers.dup
Scheduler::Defer.later("hijack #{params["controller"]} #{params["action"]} #{info}") do
-
MethodProfiler.start(transfer_timings)
begin
Thread.current[Logster::Logger::LOGSTER_ENV] = env
@@ -47,22 +44,22 @@ module Hijack
instance.response = response
instance.request = request_copy
- original_headers&.each do |k, v|
- instance.response.headers[k] = v
- end
+ original_headers&.each { |k, v| instance.response.headers[k] = v }
view_start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
begin
instance.instance_eval(&blk)
rescue => e
# TODO we need to reuse our exception handling in ApplicationController
- Discourse.warn_exception(e, message: "Failed to process hijacked response correctly", env: env)
+ Discourse.warn_exception(
+ e,
+ message: "Failed to process hijacked response correctly",
+ env: env,
+ )
end
view_runtime = Process.clock_gettime(Process::CLOCK_MONOTONIC) - view_start
- unless instance.response_body || response.committed?
- instance.status = 500
- end
+ instance.status = 500 unless instance.response_body || response.committed?
response.commit!
@@ -74,13 +71,11 @@ module Hijack
Discourse::Cors.apply_headers(cors_origins, env, headers)
end
- headers['Content-Type'] ||= response.content_type || "text/plain"
- headers['Content-Length'] = body.bytesize
- headers['Connection'] = "close"
+ headers["Content-Type"] ||= response.content_type || "text/plain"
+ headers["Content-Length"] = body.bytesize
+ headers["Connection"] = "close"
- if env[Auth::DefaultCurrentUserProvider::BAD_TOKEN]
- headers['Discourse-Logged-Out'] = '1'
- end
+ headers["Discourse-Logged-Out"] = "1" if env[Auth::DefaultCurrentUserProvider::BAD_TOKEN]
status_string = Rack::Utils::HTTP_STATUS_CODES[response.status.to_i] || "Unknown"
io.write "#{response.status} #{status_string}\r\n"
@@ -90,9 +85,7 @@ module Hijack
headers["X-Runtime"] = "#{"%0.6f" % duration}"
end
- headers.each do |name, val|
- io.write "#{name}: #{val}\r\n"
- end
+ headers.each { |name, val| io.write "#{name}: #{val}\r\n" }
io.write "\r\n"
io.write body
@@ -100,30 +93,35 @@ module Hijack
# happens if client terminated before we responded, ignore
io = nil
ensure
-
if Rails.configuration.try(:lograge).try(:enabled)
if timings
db_runtime = 0
- if timings[:sql]
- db_runtime = timings[:sql][:duration]
- end
+ db_runtime = timings[:sql][:duration] if timings[:sql]
subscriber = Lograge::LogSubscribers::ActionController.new
- payload = ActiveSupport::HashWithIndifferentAccess.new(
- controller: self.class.name,
- action: action_name,
- params: request.filtered_parameters,
- headers: request.headers,
- format: request.format.ref,
- method: request.request_method,
- path: request.fullpath,
- view_runtime: view_runtime * 1000.0,
- db_runtime: db_runtime * 1000.0,
- timings: timings,
- status: response.status
- )
+ payload =
+ ActiveSupport::HashWithIndifferentAccess.new(
+ controller: self.class.name,
+ action: action_name,
+ params: request.filtered_parameters,
+ headers: request.headers,
+ format: request.format.ref,
+ method: request.request_method,
+ path: request.fullpath,
+ view_runtime: view_runtime * 1000.0,
+ db_runtime: db_runtime * 1000.0,
+ timings: timings,
+ status: response.status,
+ )
- event = ActiveSupport::Notifications::Event.new("hijack", Time.now, Time.now + timings[:total_duration], "", payload)
+ event =
+ ActiveSupport::Notifications::Event.new(
+ "hijack",
+ Time.now,
+ Time.now + timings[:total_duration],
+ "",
+ payload,
+ )
subscriber.process_action(event)
end
end
@@ -131,10 +129,19 @@ module Hijack
MethodProfiler.clear
Thread.current[Logster::Logger::LOGSTER_ENV] = nil
- io.close if io rescue nil
+ begin
+ io.close if io
+ rescue StandardError
+ nil
+ end
if request_tracker
- status = response.status rescue 500
+ status =
+ begin
+ response.status
+ rescue StandardError
+ 500
+ end
request_tracker.log_request_info(env, [status, headers || {}, []], timings)
end
diff --git a/lib/homepage_constraint.rb b/lib/homepage_constraint.rb
index ef6686d5fd..78492e4f01 100644
--- a/lib/homepage_constraint.rb
+++ b/lib/homepage_constraint.rb
@@ -6,7 +6,7 @@ class HomePageConstraint
end
def matches?(request)
- return @filter == 'finish_installation' if SiteSetting.has_login_hint?
+ return @filter == "finish_installation" if SiteSetting.has_login_hint?
current_user = CurrentUser.lookup_from_env(request.env)
homepage = current_user&.user_option&.homepage || SiteSetting.anonymous_homepage
diff --git a/lib/html_prettify.rb b/lib/html_prettify.rb
index 06a00099b2..074a3c8a2a 100644
--- a/lib/html_prettify.rb
+++ b/lib/html_prettify.rb
@@ -82,14 +82,14 @@ class HtmlPrettify < String
elsif @options.include?(-1)
do_stupefy = true
else
- do_quotes = @options.include?(:quotes)
- do_backticks = @options.include?(:backticks)
- do_backticks = :both if @options.include?(:allbackticks)
- do_dashes = :normal if @options.include?(:dashes)
- do_dashes = :oldschool if @options.include?(:oldschool)
- do_dashes = :inverted if @options.include?(:inverted)
- do_ellipses = @options.include?(:ellipses)
- do_stupefy = @options.include?(:stupefy)
+ do_quotes = @options.include?(:quotes)
+ do_backticks = @options.include?(:backticks)
+ do_backticks = :both if @options.include?(:allbackticks)
+ do_dashes = :normal if @options.include?(:dashes)
+ do_dashes = :oldschool if @options.include?(:oldschool)
+ do_dashes = :inverted if @options.include?(:inverted)
+ do_ellipses = @options.include?(:ellipses)
+ do_stupefy = @options.include?(:stupefy)
end
# Parse the HTML
@@ -110,8 +110,8 @@ class HtmlPrettify < String
tokens.each do |token|
if token.first == :tag
result << token[1]
- if token[1] =~ %r!<(/?)(?:pre|code|kbd|script|math)[\s>]!
- in_pre = ($1 != "/") # Opening or closing tag?
+ if token[1] =~ %r{<(/?)(?:pre|code|kbd|script|math)[\s>]}
+ in_pre = ($1 != "/") # Opening or closing tag?
end
else
t = token[1]
@@ -120,24 +120,23 @@ class HtmlPrettify < String
last_char = t[-1].chr
unless in_pre
-
t.gsub!("&#39;", "'")
t.gsub!("&quot;", '"')
if do_dashes
- t = educate_dashes t if do_dashes == :normal
- t = educate_dashes_oldschool t if do_dashes == :oldschool
- t = educate_dashes_inverted t if do_dashes == :inverted
+ t = educate_dashes t if do_dashes == :normal
+ t = educate_dashes_oldschool t if do_dashes == :oldschool
+ t = educate_dashes_inverted t if do_dashes == :inverted
end
- t = educate_ellipses t if do_ellipses
+ t = educate_ellipses t if do_ellipses
t = educate_fractions t
# Note: backticks need to be processed before quotes.
if do_backticks
t = educate_backticks t
- t = educate_single_backticks t if do_backticks == :both
+ t = educate_single_backticks t if do_backticks == :both
end
if do_quotes
@@ -161,7 +160,7 @@ class HtmlPrettify < String
end
end
- t = stupefy_entities t if do_stupefy
+ t = stupefy_entities t if do_stupefy
end
prev_token_last_char = last_char
@@ -179,8 +178,7 @@ class HtmlPrettify < String
# em-dash HTML entity.
#
def educate_dashes(str)
- str.
- gsub(/--/, entity(:em_dash))
+ str.gsub(/--/, entity(:em_dash))
end
# The string, with each instance of "--" translated to an
@@ -188,9 +186,7 @@ class HtmlPrettify < String
# em-dash HTML entity.
#
def educate_dashes_oldschool(str)
- str.
- gsub(/---/, entity(:em_dash)).
- gsub(/--/, entity(:en_dash))
+ str.gsub(/---/, entity(:em_dash)).gsub(/--/, entity(:en_dash))
end
# Return the string, with each instance of "--" translated
@@ -204,9 +200,7 @@ class HtmlPrettify < String
# Aaron Swartz for the idea.)
#
def educate_dashes_inverted(str)
- str.
- gsub(/---/, entity(:en_dash)).
- gsub(/--/, entity(:em_dash))
+ str.gsub(/---/, entity(:en_dash)).gsub(/--/, entity(:em_dash))
end
# Return the string, with each instance of "..." translated
@@ -214,31 +208,25 @@ class HtmlPrettify < String
# spaces between the dots.
#
def educate_ellipses(str)
- str.
- gsub('...', entity(:ellipsis)).
- gsub('. . .', entity(:ellipsis))
+ str.gsub("...", entity(:ellipsis)).gsub(". . .", entity(:ellipsis))
end
# Return the string, with "``backticks''"-style single quotes
# translated into HTML curly quote entities.
#
def educate_backticks(str)
- str.
- gsub("``", entity(:double_left_quote)).
- gsub("''", entity(:double_right_quote))
+ str.gsub("``", entity(:double_left_quote)).gsub("''", entity(:double_right_quote))
end
# Return the string, with "`backticks'"-style single quotes
# translated into HTML curly quote entities.
#
def educate_single_backticks(str)
- str.
- gsub("`", entity(:single_left_quote)).
- gsub("'", entity(:single_right_quote))
+ str.gsub("`", entity(:single_left_quote)).gsub("'", entity(:single_right_quote))
end
def educate_fractions(str)
- str.gsub(/(\s+|^)(1\/4|1\/2|3\/4)([,.;\s]|$)/) do
+ str.gsub(%r{(\s+|^)(1/4|1/2|3/4)([,.;\s]|$)}) do
frac =
if $2 == "1/2"
entity(:frac12)
@@ -261,52 +249,45 @@ class HtmlPrettify < String
# Special case if the very first character is a quote followed by
# punctuation at a non-word-break. Close the quotes by brute
# force:
- str.gsub!(/^'(?=#{punct_class}\B)/,
- entity(:single_right_quote))
- str.gsub!(/^"(?=#{punct_class}\B)/,
- entity(:double_right_quote))
+ str.gsub!(/^'(?=#{punct_class}\B)/, entity(:single_right_quote))
+ str.gsub!(/^"(?=#{punct_class}\B)/, entity(:double_right_quote))
# Special case for double sets of quotes, e.g.:
#
He said, "'Quoted' words in a larger quote."
- str.gsub!(/"'(?=\w)/,
- "#{entity(:double_left_quote)}#{entity(:single_left_quote)}")
- str.gsub!(/'"(?=\w)/,
- "#{entity(:single_left_quote)}#{entity(:double_left_quote)}")
+ str.gsub!(/"'(?=\w)/, "#{entity(:double_left_quote)}#{entity(:single_left_quote)}")
+ str.gsub!(/'"(?=\w)/, "#{entity(:single_left_quote)}#{entity(:double_left_quote)}")
# Special case for decade abbreviations (the '80s):
- str.gsub!(/'(?=\d\ds)/,
- entity(:single_right_quote))
+ str.gsub!(/'(?=\d\ds)/, entity(:single_right_quote))
close_class = %![^\ \t\r\n\\[\{\(\-]!
dec_dashes = "#{entity(:en_dash)}|#{entity(:em_dash)}"
# Get most opening single quotes:
- str.gsub!(/(\s|&nbsp;|=|--|&[mn]dash;|#{dec_dashes}|&#x201[34];)'(?=\w)/,
- '\1' + entity(:single_left_quote))
+ str.gsub!(
+ /(\s|&nbsp;|=|--|&[mn]dash;|#{dec_dashes}|&#x201[34];)'(?=\w)/,
+ '\1' + entity(:single_left_quote),
+ )
# Single closing quotes:
- str.gsub!(/(#{close_class})'/,
- '\1' + entity(:single_right_quote))
- str.gsub!(/'(\s|s\b|$)/,
- entity(:single_right_quote) + '\1')
+ str.gsub!(/(#{close_class})'/, '\1' + entity(:single_right_quote))
+ str.gsub!(/'(\s|s\b|$)/, entity(:single_right_quote) + '\1')
# Any remaining single quotes should be opening ones:
- str.gsub!(/'/,
- entity(:single_left_quote))
+ str.gsub!(/'/, entity(:single_left_quote))
# Get most opening double quotes:
- str.gsub!(/(\s|&nbsp;|=|--|&[mn]dash;|#{dec_dashes}|&#x201[34];)"(?=\w)/,
- '\1' + entity(:double_left_quote))
+ str.gsub!(
+ /(\s|&nbsp;|=|--|&[mn]dash;|#{dec_dashes}|&#x201[34];)"(?=\w)/,
+ '\1' + entity(:double_left_quote),
+ )
# Double closing quotes:
- str.gsub!(/(#{close_class})"/,
- '\1' + entity(:double_right_quote))
- str.gsub!(/"(\s|s\b|$)/,
- entity(:double_right_quote) + '\1')
+ str.gsub!(/(#{close_class})"/, '\1' + entity(:double_right_quote))
+ str.gsub!(/"(\s|s\b|$)/, entity(:double_right_quote) + '\1')
# Any remaining quotes should be opening ones:
- str.gsub!(/"/,
- entity(:double_left_quote))
+ str.gsub!(/"/, entity(:double_left_quote))
str
end
@@ -320,16 +301,14 @@ class HtmlPrettify < String
new_str = str.dup
{
- en_dash: '-',
- em_dash: '--',
+ en_dash: "-",
+ em_dash: "--",
single_left_quote: "'",
single_right_quote: "'",
double_left_quote: '"',
double_right_quote: '"',
- ellipsis: '...'
- }.each do |k, v|
- new_str.gsub!(/#{entity(k)}/, v)
- end
+ ellipsis: "...",
+ }.each { |k, v| new_str.gsub!(/#{entity(k)}/, v) }
new_str
end
@@ -354,14 +333,12 @@ class HtmlPrettify < String
prev_end = 0
scan(tag_soup) do
- tokens << [:text, $1] if $1 != ""
+ tokens << [:text, $1] if $1 != ""
tokens << [:tag, $2]
prev_end = $~.end(0)
end
- if prev_end < size
- tokens << [:text, self[prev_end..-1]]
- end
+ tokens << [:text, self[prev_end..-1]] if prev_end < size
tokens
end
@@ -385,5 +362,4 @@ class HtmlPrettify < String
def entity(key)
@entities[key]
end
-
end
diff --git a/lib/html_to_markdown.rb b/lib/html_to_markdown.rb
index 2d2783d467..67626fd76e 100644
--- a/lib/html_to_markdown.rb
+++ b/lib/html_to_markdown.rb
@@ -3,12 +3,11 @@
require "securerandom"
class HtmlToMarkdown
-
def initialize(html, opts = {})
@opts = opts
# we're only interested in
- @doc = Nokogiri::HTML5(html).at("body")
+ @doc = Nokogiri.HTML5(html).at("body")
remove_not_allowed!(@doc)
remove_hidden!(@doc)
@@ -17,9 +16,7 @@ class HtmlToMarkdown
end
def to_markdown
- traverse(@doc)
- .gsub(/\n{2,}/, "\n\n")
- .strip
+ traverse(@doc).gsub(/\n{2,}/, "\n\n").strip
end
private
@@ -50,31 +47,33 @@ class HtmlToMarkdown
loop do
changed = false
- doc.css("br.#{klass}").each do |br|
- parent = br.parent
+ doc
+ .css("br.#{klass}")
+ .each do |br|
+ parent = br.parent
- if block?(parent)
- br.remove_class(klass)
- else
- before, after = parent.children.slice_when { |n| n == br }.to_a
+ if block?(parent)
+ br.remove_class(klass)
+ else
+ before, after = parent.children.slice_when { |n| n == br }.to_a
- if before.size > 1
- b = doc.document.create_element(parent.name)
- before[0...-1].each { |c| b.add_child(c) }
- parent.previous = b if b.inner_html.present?
+ if before.size > 1
+ b = doc.document.create_element(parent.name)
+ before[0...-1].each { |c| b.add_child(c) }
+ parent.previous = b if b.inner_html.present?
+ end
+
+ if after.present?
+ a = doc.document.create_element(parent.name)
+ after.each { |c| a.add_child(c) }
+ parent.next = a if a.inner_html.present?
+ end
+
+ parent.replace(br)
+
+ changed = true
end
-
- if after.present?
- a = doc.document.create_element(parent.name)
- after.each { |c| a.add_child(c) }
- parent.next = a if a.inner_html.present?
- end
-
- parent.replace(br)
-
- changed = true
end
- end
break if !changed
end
@@ -85,17 +84,21 @@ class HtmlToMarkdown
def remove_whitespaces!(node)
return true if "pre" == node.name
- node.children.chunk { |n| is_inline?(n) }.each do |inline, nodes|
- if inline
- collapse_spaces!(nodes) && remove_trailing_space!(nodes)
- else
- nodes.each { |n| remove_whitespaces!(n) }
+ node
+ .children
+ .chunk { |n| is_inline?(n) }
+ .each do |inline, nodes|
+ if inline
+ collapse_spaces!(nodes) && remove_trailing_space!(nodes)
+ else
+ nodes.each { |n| remove_whitespaces!(n) }
+ end
end
- end
end
def is_inline?(node)
- node.text? || ("br" != node.name && node.description&.inline? && node.children.all? { |n| is_inline?(n) })
+ node.text? ||
+ ("br" != node.name && node.description&.inline? && node.children.all? { |n| is_inline?(n) })
end
def collapse_spaces!(nodes, was_space = true)
@@ -141,15 +144,16 @@ class HtmlToMarkdown
send(visitor, node) if respond_to?(visitor, true)
end
- ALLOWED_IMG_SRCS ||= %w{http:// https:// www.}
+ ALLOWED_IMG_SRCS ||= %w[http:// https:// www.]
def allowed_hrefs
- @allowed_hrefs ||= begin
- hrefs = SiteSetting.allowed_href_schemes.split("|").map { |scheme| "#{scheme}:" }.to_set
- ALLOWED_IMG_SRCS.each { |src| hrefs << src }
- hrefs << "mailto:"
- hrefs.to_a
- end
+ @allowed_hrefs ||=
+ begin
+ hrefs = SiteSetting.allowed_href_schemes.split("|").map { |scheme| "#{scheme}:" }.to_set
+ ALLOWED_IMG_SRCS.each { |src| hrefs << src }
+ hrefs << "mailto:"
+ hrefs.to_a
+ end
end
def visit_a(node)
@@ -176,11 +180,9 @@ class HtmlToMarkdown
end
end
- ALLOWED ||= %w{kbd del ins small big sub sup dl dd dt mark}
+ ALLOWED ||= %w[kbd del ins small big sub sup dl dd dt mark]
ALLOWED.each do |tag|
- define_method("visit_#{tag}") do |node|
- "<#{tag}>#{traverse(node)}#{tag}>"
- end
+ define_method("visit_#{tag}") { |node| "<#{tag}>#{traverse(node)}#{tag}>" }
end
def visit_blockquote(node)
@@ -191,7 +193,7 @@ class HtmlToMarkdown
"\n\n#{text}\n\n"
end
- BLOCKS ||= %w{div tr}
+ BLOCKS ||= %w[div tr]
BLOCKS.each do |tag|
define_method("visit_#{tag}") do |node|
prefix = block?(node.previous_element) ? "" : "\n"
@@ -203,12 +205,8 @@ class HtmlToMarkdown
"\n\n#{traverse(node)}\n\n"
end
- TRAVERSABLES ||= %w{aside font span thead tbody tfooter u}
- TRAVERSABLES.each do |tag|
- define_method("visit_#{tag}") do |node|
- traverse(node)
- end
- end
+ TRAVERSABLES ||= %w[aside font span thead tbody tfooter u]
+ TRAVERSABLES.each { |tag| define_method("visit_#{tag}") { |node| traverse(node) } }
def visit_tt(node)
"`#{traverse(node)}`"
@@ -245,18 +243,10 @@ class HtmlToMarkdown
visit_abbr(node)
end
- (1..6).each do |n|
- define_method("visit_h#{n}") do |node|
- "#{"#" * n} #{traverse(node)}"
- end
- end
+ (1..6).each { |n| define_method("visit_h#{n}") { |node| "#{"#" * n} #{traverse(node)}" } }
- CELLS ||= %w{th td}
- CELLS.each do |tag|
- define_method("visit_#{tag}") do |node|
- "#{traverse(node)} "
- end
- end
+ CELLS ||= %w[th td]
+ CELLS.each { |tag| define_method("visit_#{tag}") { |node| "#{traverse(node)} " } }
def visit_table(node)
if rows = extract_rows(node)
@@ -264,7 +254,8 @@ class HtmlToMarkdown
text = "| " + headers.map { |td| traverse(td).gsub(/\n/, " ") }.join(" | ") + " |\n"
text << "| " + (["-"] * headers.size).join(" | ") + " |\n"
rows[1..-1].each do |row|
- text << "| " + row.css("td").map { |td| traverse(td).gsub(/\n/, " ") }.join(" | ") + " |\n"
+ text << "| " + row.css("td").map { |td| traverse(td).gsub(/\n/, " ") }.join(" | ") +
+ " |\n"
end
"\n\n#{text}\n\n"
else
@@ -280,7 +271,7 @@ class HtmlToMarkdown
rows
end
- LISTS ||= %w{ul ol}
+ LISTS ||= %w[ul ol]
LISTS.each do |tag|
define_method("visit_#{tag}") do |node|
prefix = block?(node.previous_element) ? "" : "\n"
@@ -304,12 +295,12 @@ class HtmlToMarkdown
"#{marker}#{text}#{suffix}"
end
- EMPHASES ||= %w{i em}
+ EMPHASES ||= %w[i em]
EMPHASES.each do |tag|
define_method("visit_#{tag}") do |node|
text = traverse(node)
- return "" if text.empty?
+ return "" if text.empty?
return " " if text.blank?
return "<#{tag}>#{text}#{tag}>" if text["\n"] || (text["*"] && text["_"])
@@ -321,12 +312,12 @@ class HtmlToMarkdown
end
end
- STRONGS ||= %w{b strong}
+ STRONGS ||= %w[b strong]
STRONGS.each do |tag|
define_method("visit_#{tag}") do |node|
text = traverse(node)
- return "" if text.empty?
+ return "" if text.empty?
return " " if text.blank?
return "<#{tag}>#{text}#{tag}>" if text["\n"] || (text["*"] && text["_"])
@@ -338,12 +329,12 @@ class HtmlToMarkdown
end
end
- STRIKES ||= %w{s strike}
+ STRIKES ||= %w[s strike]
STRIKES.each do |tag|
define_method("visit_#{tag}") do |node|
text = traverse(node)
- return "" if text.empty?
+ return "" if text.empty?
return " " if text.blank?
return "<#{tag}>#{text}#{tag}>" if text["\n"] || text["~~"]
@@ -358,7 +349,19 @@ class HtmlToMarkdown
node.text
end
- HTML5_BLOCK_ELEMENTS ||= %w[article aside details dialog figcaption figure footer header main nav section]
+ HTML5_BLOCK_ELEMENTS ||= %w[
+ article
+ aside
+ details
+ dialog
+ figcaption
+ figure
+ footer
+ header
+ main
+ nav
+ section
+ ]
def block?(node)
return false if !node
node.description&.block? || HTML5_BLOCK_ELEMENTS.include?(node.name)
diff --git a/lib/http_language_parser.rb b/lib/http_language_parser.rb
index debfddc604..a2e24ca0aa 100644
--- a/lib/http_language_parser.rb
+++ b/lib/http_language_parser.rb
@@ -4,10 +4,10 @@ module HttpLanguageParser
def self.parse(header)
# Rails I18n uses underscores between the locale and the region; the request
# headers use hyphens.
- require 'http_accept_language' unless defined? HttpAcceptLanguage
- available_locales = I18n.available_locales.map { |locale| locale.to_s.tr('_', '-') }
+ require "http_accept_language" unless defined?(HttpAcceptLanguage)
+ available_locales = I18n.available_locales.map { |locale| locale.to_s.tr("_", "-") }
parser = HttpAcceptLanguage::Parser.new(header)
- matched = parser.language_region_compatible_from(available_locales)&.tr('-', '_')
+ matched = parser.language_region_compatible_from(available_locales)&.tr("-", "_")
matched || SiteSetting.default_locale
end
end
diff --git a/lib/i18n/backend/discourse_i18n.rb b/lib/i18n/backend/discourse_i18n.rb
index 1511e67e96..4b1e24e9ec 100644
--- a/lib/i18n/backend/discourse_i18n.rb
+++ b/lib/i18n/backend/discourse_i18n.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'i18n/backend/pluralization'
+require "i18n/backend/pluralization"
module I18n
module Backend
@@ -22,9 +22,7 @@ module I18n
# force explicit loading
def load_translations(*filenames)
unless filenames.empty?
- self.class.sort_locale_files(filenames.flatten).each do |filename|
- load_file(filename)
- end
+ self.class.sort_locale_files(filenames.flatten).each { |filename| load_file(filename) }
end
end
@@ -90,10 +88,12 @@ module I18n
if overrides
if options[:count]
if !existing_translations
- I18n.fallbacks[locale].drop(1).each do |fallback|
- existing_translations = super(fallback, key, scope, options)
- break if existing_translations.present?
- end
+ I18n.fallbacks[locale]
+ .drop(1)
+ .each do |fallback|
+ existing_translations = super(fallback, key, scope, options)
+ break if existing_translations.present?
+ end
end
if existing_translations
@@ -106,9 +106,11 @@ module I18n
result = {}
- remapped_translations.merge(overrides).each do |k, v|
- result[k.split('.').last.to_sym] = v if k != key && k.start_with?(key)
- end
+ remapped_translations
+ .merge(overrides)
+ .each do |k, v|
+ result[k.split(".").last.to_sym] = v if k != key && k.start_with?(key)
+ end
return result if result.size > 0
end
end
diff --git a/lib/i18n/duplicate_key_finder.rb b/lib/i18n/duplicate_key_finder.rb
index 65b0f33d0e..4abd6300ff 100644
--- a/lib/i18n/duplicate_key_finder.rb
+++ b/lib/i18n/duplicate_key_finder.rb
@@ -3,7 +3,6 @@
require "locale_file_walker"
class DuplicateKeyFinder < LocaleFileWalker
-
def find_duplicates(path)
@keys_with_count = Hash.new { 0 }
handle_document(Psych.parse_file(path))
@@ -14,6 +13,6 @@ class DuplicateKeyFinder < LocaleFileWalker
def handle_scalar(node, depth, parents)
super
- @keys_with_count[parents.join('.')] += 1
+ @keys_with_count[parents.join(".")] += 1
end
end
diff --git a/lib/i18n/locale_file_checker.rb b/lib/i18n/locale_file_checker.rb
index 4d0088ab8c..de9eae3056 100644
--- a/lib/i18n/locale_file_checker.rb
+++ b/lib/i18n/locale_file_checker.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
-require 'i18n/i18n_interpolation_keys_finder'
-require 'yaml'
+require "i18n/i18n_interpolation_keys_finder"
+require "yaml"
class LocaleFileChecker
TYPE_MISSING_INTERPOLATION_KEYS = 1
@@ -17,7 +17,8 @@ class LocaleFileChecker
locale_files.each do |locale_path|
next unless reference_path = reference_file(locale_path)
- @relative_locale_path = Pathname.new(locale_path).relative_path_from(Pathname.new(Rails.root)).to_s
+ @relative_locale_path =
+ Pathname.new(locale_path).relative_path_from(Pathname.new(Rails.root)).to_s
@locale_yaml = YAML.load_file(locale_path)
@reference_yaml = YAML.load_file(reference_path)
@@ -34,14 +35,14 @@ class LocaleFileChecker
private
- YML_DIRS = ["config/locales", "plugins/**/locales"]
+ YML_DIRS = %w[config/locales plugins/**/locales]
PLURALS_FILE = "config/locales/plurals.rb"
REFERENCE_LOCALE = "en"
- REFERENCE_PLURAL_KEYS = ["one", "other"]
+ REFERENCE_PLURAL_KEYS = %w[one other]
# Some languages should always use %{count} in pluralized strings.
# https://meta.discourse.org/t/always-use-count-variable-when-translating-pluralized-strings/83969
- FORCE_PLURAL_COUNT_LOCALES = ["bs", "fr", "lt", "lv", "ru", "sl", "sr", "uk"]
+ FORCE_PLURAL_COUNT_LOCALES = %w[bs fr lt lv ru sl sr uk]
def locale_files
YML_DIRS.map { |dir| Dir["#{Rails.root}/#{dir}/{client,server}.#{@locale}.yml"] }.flatten
@@ -92,8 +93,17 @@ class LocaleFileChecker
missing_keys.delete("count")
end
- add_error(keys, TYPE_MISSING_INTERPOLATION_KEYS, missing_keys, pluralized: pluralized) unless missing_keys.empty?
- add_error(keys, TYPE_UNSUPPORTED_INTERPOLATION_KEYS, unsupported_keys, pluralized: pluralized) unless unsupported_keys.empty?
+ unless missing_keys.empty?
+ add_error(keys, TYPE_MISSING_INTERPOLATION_KEYS, missing_keys, pluralized: pluralized)
+ end
+ unless unsupported_keys.empty?
+ add_error(
+ keys,
+ TYPE_UNSUPPORTED_INTERPOLATION_KEYS,
+ unsupported_keys,
+ pluralized: pluralized,
+ )
+ end
end
end
@@ -123,12 +133,15 @@ class LocaleFileChecker
actual_plural_keys = parent.is_a?(Hash) ? parent.keys : []
missing_plural_keys = expected_plural_keys - actual_plural_keys
- add_error(keys, TYPE_MISSING_PLURAL_KEYS, missing_plural_keys, pluralized: true) unless missing_plural_keys.empty?
+ unless missing_plural_keys.empty?
+ add_error(keys, TYPE_MISSING_PLURAL_KEYS, missing_plural_keys, pluralized: true)
+ end
end
end
def check_message_format
- mf_locale, mf_filename = JsLocaleHelper.find_message_format_locale([@locale], fallback_to_english: true)
+ mf_locale, mf_filename =
+ JsLocaleHelper.find_message_format_locale([@locale], fallback_to_english: true)
traverse_hash(@locale_yaml, []) do |keys, value|
next unless keys.last.ends_with?("_MF")
@@ -158,17 +171,18 @@ class LocaleFileChecker
end
def reference_value_pluralized?(value)
- value.is_a?(Hash) &&
- value.keys.sort == REFERENCE_PLURAL_KEYS &&
+ value.is_a?(Hash) && value.keys.sort == REFERENCE_PLURAL_KEYS &&
value.keys.all? { |k| value[k].is_a?(String) }
end
def plural_keys
- @plural_keys ||= begin
- eval(File.read("#{Rails.root}/#{PLURALS_FILE}")).map do |locale, value| # rubocop:disable Security/Eval
- [locale.to_s, value[:i18n][:plural][:keys].map(&:to_s)]
- end.to_h
- end
+ @plural_keys ||=
+ begin
+ # rubocop:disable Security/Eval
+ eval(File.read("#{Rails.root}/#{PLURALS_FILE}"))
+ .map { |locale, value| [locale.to_s, value[:i18n][:plural][:keys].map(&:to_s)] }
+ .to_h
+ end
end
def add_error(keys, type, details, pluralized:)
@@ -180,10 +194,6 @@ class LocaleFileChecker
joined_key = keys[1..-1].join(".")
end
- @errors[@relative_locale_path] << {
- key: joined_key,
- type: type,
- details: details.to_s
- }
+ @errors[@relative_locale_path] << { key: joined_key, type: type, details: details.to_s }
end
end
diff --git a/lib/i18n/locale_file_walker.rb b/lib/i18n/locale_file_walker.rb
index facc7235c0..94c27dc336 100644
--- a/lib/i18n/locale_file_walker.rb
+++ b/lib/i18n/locale_file_walker.rb
@@ -22,7 +22,11 @@ class LocaleFileWalker
def handle_node(node, depth, parents, consecutive_scalars)
if node_is_scalar = node.is_a?(Psych::Nodes::Scalar)
- valid_scalar?(depth, consecutive_scalars) ? handle_scalar(node, depth, parents) : handle_value(node.value, parents)
+ if valid_scalar?(depth, consecutive_scalars)
+ handle_scalar(node, depth, parents)
+ else
+ handle_value(node.value, parents)
+ end
elsif node.is_a?(Psych::Nodes::Alias)
handle_alias(node, depth, parents)
elsif node.is_a?(Psych::Nodes::Mapping)
diff --git a/lib/image_sizer.rb b/lib/image_sizer.rb
index ddb86396d5..29118b3940 100644
--- a/lib/image_sizer.rb
+++ b/lib/image_sizer.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
module ImageSizer
-
# Resize an image to the aspect ratio we want
def self.resize(width, height, opts = {})
return if width.blank? || height.blank?
@@ -12,7 +11,7 @@ module ImageSizer
w = width.to_f
h = height.to_f
- return [w.floor, h.floor] if w <= max_width && h <= max_height
+ return w.floor, h.floor if w <= max_width && h <= max_height
ratio = [max_width / w, max_height / h].min
[(w * ratio).floor, (h * ratio).floor]
@@ -27,11 +26,10 @@ module ImageSizer
w = width.to_f
h = height.to_f
- return [w.floor, h.floor] if w <= max_width && h <= max_height
+ return w.floor, h.floor if w <= max_width && h <= max_height
ratio = max_width / w
[[max_width, w].min.floor, [max_height, (h * ratio)].min.floor]
end
-
end
diff --git a/lib/imap/providers/detector.rb b/lib/imap/providers/detector.rb
index 41f356517e..7ad50c4ea2 100644
--- a/lib/imap/providers/detector.rb
+++ b/lib/imap/providers/detector.rb
@@ -4,7 +4,7 @@ module Imap
module Providers
class Detector
def self.init_with_detected_provider(config)
- if config[:server] == 'imap.gmail.com'
+ if config[:server] == "imap.gmail.com"
return Imap::Providers::Gmail.new(config[:server], config)
end
Imap::Providers::Generic.new(config[:server], config)
diff --git a/lib/imap/providers/generic.rb b/lib/imap/providers/generic.rb
index 53ec57459d..59f43d35f2 100644
--- a/lib/imap/providers/generic.rb
+++ b/lib/imap/providers/generic.rb
@@ -1,10 +1,11 @@
# frozen_string_literal: true
-require 'net/imap'
+require "net/imap"
module Imap
module Providers
- class WriteDisabledError < StandardError; end
+ class WriteDisabledError < StandardError
+ end
class TrashedMailResponse
attr_accessor :trashed_emails, :trash_uid_validity
@@ -50,12 +51,16 @@ module Imap
end
def disconnect!
- imap.logout rescue nil
+ begin
+ imap.logout
+ rescue StandardError
+ nil
+ end
imap.disconnect
end
def can?(capability)
- @capabilities ||= imap.responses['CAPABILITY'][-1] || imap.capability
+ @capabilities ||= imap.responses["CAPABILITY"][-1] || imap.capability
@capabilities.include?(capability)
end
@@ -67,22 +72,23 @@ module Imap
elsif opts[:to]
imap.uid_search("UID 1:#{opts[:to]}")
else
- imap.uid_search('ALL')
+ imap.uid_search("ALL")
end
end
def labels
- @labels ||= begin
- labels = {}
+ @labels ||=
+ begin
+ labels = {}
- list_mailboxes.each do |name|
- if tag = to_tag(name)
- labels[tag] = name
+ list_mailboxes.each do |name|
+ if tag = to_tag(name)
+ labels[tag] = name
+ end
end
- end
- labels
- end
+ labels
+ end
end
def open_mailbox(mailbox_name, write: false)
@@ -98,9 +104,7 @@ module Imap
@open_mailbox_name = mailbox_name
@open_mailbox_write = write
- {
- uid_validity: imap.responses['UIDVALIDITY'][-1]
- }
+ { uid_validity: imap.responses["UIDVALIDITY"][-1] }
end
def emails(uids, fields, opts = {})
@@ -114,9 +118,7 @@ module Imap
fetched.map do |email|
attributes = {}
- fields.each do |field|
- attributes[field] = email.attr[field]
- end
+ fields.each { |field| attributes[field] = email.attr[field] }
attributes
end
@@ -131,11 +133,11 @@ module Imap
def to_tag(label)
label = DiscourseTagging.clean_tag(label.to_s)
- label if label != 'inbox' && label != 'sent'
+ label if label != "inbox" && label != "sent"
end
def tag_to_flag(tag)
- :Seen if tag == 'seen'
+ :Seen if tag == "seen"
end
def tag_to_label(tag)
@@ -150,24 +152,25 @@ module Imap
def list_mailboxes_with_attributes(attr_filter = nil)
# Basically, list all mailboxes in the root of the server.
# ref: https://tools.ietf.org/html/rfc3501#section-6.3.8
- imap.list('', '*').reject do |m|
-
- # Noselect cannot be selected with the SELECT command.
- # technically we could use this for readonly mode when
- # SiteSetting.imap_write is disabled...maybe a later TODO
- # ref: https://tools.ietf.org/html/rfc3501#section-7.2.2
- m.attr.include?(:Noselect)
- end.select do |m|
-
- # There are Special-Use mailboxes denoted by an attribute. For
- # example, some common ones are \Trash or \Sent.
- # ref: https://tools.ietf.org/html/rfc6154
- if attr_filter
- m.attr.include? attr_filter
- else
- true
+ imap
+ .list("", "*")
+ .reject do |m|
+ # Noselect cannot be selected with the SELECT command.
+ # technically we could use this for readonly mode when
+ # SiteSetting.imap_write is disabled...maybe a later TODO
+ # ref: https://tools.ietf.org/html/rfc3501#section-7.2.2
+ m.attr.include?(:Noselect)
+ end
+ .select do |m|
+ # There are Special-Use mailboxes denoted by an attribute. For
+ # example, some common ones are \Trash or \Sent.
+ # ref: https://tools.ietf.org/html/rfc6154
+ if attr_filter
+ m.attr.include? attr_filter
+ else
+ true
+ end
end
- end
end
def filter_mailboxes(mailboxes)
@@ -186,16 +189,20 @@ module Imap
# Look for the special Trash XLIST attribute.
def trash_mailbox
- Discourse.cache.fetch("imap_trash_mailbox_#{account_digest}", expires_in: 30.minutes) do
- list_mailboxes(:Trash).first
- end
+ Discourse
+ .cache
+ .fetch("imap_trash_mailbox_#{account_digest}", expires_in: 30.minutes) do
+ list_mailboxes(:Trash).first
+ end
end
# Look for the special Junk XLIST attribute.
def spam_mailbox
- Discourse.cache.fetch("imap_spam_mailbox_#{account_digest}", expires_in: 30.minutes) do
- list_mailboxes(:Junk).first
- end
+ Discourse
+ .cache
+ .fetch("imap_spam_mailbox_#{account_digest}", expires_in: 30.minutes) do
+ list_mailboxes(:Junk).first
+ end
end
# open the trash mailbox for inspection or writing. after the yield we
@@ -232,14 +239,19 @@ module Imap
def find_trashed_by_message_ids(message_ids)
trashed_emails = []
- trash_uid_validity = open_trash_mailbox do
- trashed_email_uids = find_uids_by_message_ids(message_ids)
- if trashed_email_uids.any?
- trashed_emails = emails(trashed_email_uids, ["UID", "ENVELOPE"]).map do |e|
- BasicMail.new(message_id: Email::MessageIdService.message_id_clean(e['ENVELOPE'].message_id), uid: e['UID'])
+ trash_uid_validity =
+ open_trash_mailbox do
+ trashed_email_uids = find_uids_by_message_ids(message_ids)
+ if trashed_email_uids.any?
+ trashed_emails =
+ emails(trashed_email_uids, %w[UID ENVELOPE]).map do |e|
+ BasicMail.new(
+ message_id: Email::MessageIdService.message_id_clean(e["ENVELOPE"].message_id),
+ uid: e["UID"],
+ )
+ end
end
end
- end
TrashedMailResponse.new.tap do |resp|
resp.trashed_emails = trashed_emails
@@ -249,14 +261,19 @@ module Imap
def find_spam_by_message_ids(message_ids)
spam_emails = []
- spam_uid_validity = open_spam_mailbox do
- spam_email_uids = find_uids_by_message_ids(message_ids)
- if spam_email_uids.any?
- spam_emails = emails(spam_email_uids, ["UID", "ENVELOPE"]).map do |e|
- BasicMail.new(message_id: Email::MessageIdService.message_id_clean(e['ENVELOPE'].message_id), uid: e['UID'])
+ spam_uid_validity =
+ open_spam_mailbox do
+ spam_email_uids = find_uids_by_message_ids(message_ids)
+ if spam_email_uids.any?
+ spam_emails =
+ emails(spam_email_uids, %w[UID ENVELOPE]).map do |e|
+ BasicMail.new(
+ message_id: Email::MessageIdService.message_id_clean(e["ENVELOPE"].message_id),
+ uid: e["UID"],
+ )
+ end
end
end
- end
SpamMailResponse.new.tap do |resp|
resp.spam_emails = spam_emails
@@ -265,13 +282,14 @@ module Imap
end
def find_uids_by_message_ids(message_ids)
- header_message_id_terms = message_ids.map do |msgid|
- "HEADER Message-ID '#{Email::MessageIdService.message_id_rfc_format(msgid)}'"
- end
+ header_message_id_terms =
+ message_ids.map do |msgid|
+ "HEADER Message-ID '#{Email::MessageIdService.message_id_rfc_format(msgid)}'"
+ end
# OR clauses are written in Polish notation...so the query looks like this:
# OR OR HEADER Message-ID XXXX HEADER Message-ID XXXX HEADER Message-ID XXXX
- or_clauses = 'OR ' * (header_message_id_terms.length - 1)
+ or_clauses = "OR " * (header_message_id_terms.length - 1)
query = "#{or_clauses}#{header_message_id_terms.join(" ")}"
imap.uid_search(query)
@@ -280,17 +298,16 @@ module Imap
def trash(uid)
# MOVE is way easier than doing the COPY \Deleted EXPUNGE dance ourselves.
# It is supported by Gmail and Outlook.
- if can?('MOVE')
+ if can?("MOVE")
trash_move(uid)
else
-
# default behaviour for IMAP servers is to add the \Deleted flag
# then EXPUNGE the mailbox which permanently deletes these messages
# https://tools.ietf.org/html/rfc3501#section-6.4.3
#
# TODO: We may want to add the option at some point to copy to some
# other mailbox first before doing this (e.g. Trash)
- store(uid, 'FLAGS', [], ["\\Deleted"])
+ store(uid, "FLAGS", [], ["\\Deleted"])
imap.expunge
end
end
diff --git a/lib/imap/providers/gmail.rb b/lib/imap/providers/gmail.rb
index 7ac51f7304..fc888d66ed 100644
--- a/lib/imap/providers/gmail.rb
+++ b/lib/imap/providers/gmail.rb
@@ -8,61 +8,58 @@ module Imap
# all UIDs in a thread must have the \\Inbox label removed.
#
class Gmail < Generic
- X_GM_LABELS = 'X-GM-LABELS'
- X_GM_THRID = 'X-GM-THRID'
+ X_GM_LABELS = "X-GM-LABELS"
+ X_GM_THRID = "X-GM-THRID"
def imap
@imap ||= super.tap { |imap| apply_gmail_patch(imap) }
end
def emails(uids, fields, opts = {})
-
# gmail has a special header for labels
- if fields.include?('LABELS')
- fields[fields.index('LABELS')] = X_GM_LABELS
- end
+ fields[fields.index("LABELS")] = X_GM_LABELS if fields.include?("LABELS")
emails = super(uids, fields, opts)
emails.each do |email|
- email['LABELS'] = Array(email['LABELS'])
+ email["LABELS"] = Array(email["LABELS"])
if email[X_GM_LABELS]
- email['LABELS'] << Array(email.delete(X_GM_LABELS))
- email['LABELS'].flatten!
+ email["LABELS"] << Array(email.delete(X_GM_LABELS))
+ email["LABELS"].flatten!
end
- email['LABELS'] << '\\Inbox' if @open_mailbox_name == 'INBOX'
+ email["LABELS"] << '\\Inbox' if @open_mailbox_name == "INBOX"
- email['LABELS'].uniq!
+ email["LABELS"].uniq!
end
emails
end
def store(uid, attribute, old_set, new_set)
- attribute = X_GM_LABELS if attribute == 'LABELS'
+ attribute = X_GM_LABELS if attribute == "LABELS"
super(uid, attribute, old_set, new_set)
end
def to_tag(label)
# Label `\\Starred` is Gmail equivalent of :Flagged (both present)
- return 'starred' if label == :Flagged
- return if label == '[Gmail]/All Mail'
+ return "starred" if label == :Flagged
+ return if label == "[Gmail]/All Mail"
- label = label.to_s.gsub('[Gmail]/', '')
+ label = label.to_s.gsub("[Gmail]/", "")
super(label)
end
def tag_to_flag(tag)
- return :Flagged if tag == 'starred'
+ return :Flagged if tag == "starred"
super(tag)
end
def tag_to_label(tag)
- return '\\Important' if tag == 'important'
- return '\\Starred' if tag == 'starred'
+ return '\\Important' if tag == "important"
+ return '\\Starred' if tag == "starred"
super(tag)
end
@@ -73,11 +70,14 @@ module Imap
thread_id = thread_id_from_uid(uid)
emails_to_archive = emails_in_thread(thread_id)
emails_to_archive.each do |email|
- labels = email['LABELS']
+ labels = email["LABELS"]
new_labels = labels.reject { |l| l == "\\Inbox" }
store(email["UID"], "LABELS", labels, new_labels)
end
- ImapSyncLog.log("Thread ID #{thread_id} (UID #{uid}) archived in Gmail mailbox for #{@username}", :debug)
+ ImapSyncLog.log(
+ "Thread ID #{thread_id} (UID #{uid}) archived in Gmail mailbox for #{@username}",
+ :debug,
+ )
end
# Though Gmail considers the email thread unarchived if the first email
@@ -87,36 +87,38 @@ module Imap
thread_id = thread_id_from_uid(uid)
emails_to_unarchive = emails_in_thread(thread_id)
emails_to_unarchive.each do |email|
- labels = email['LABELS']
+ labels = email["LABELS"]
new_labels = labels.dup
- if !new_labels.include?("\\Inbox")
- new_labels << "\\Inbox"
- end
+ new_labels << "\\Inbox" if !new_labels.include?("\\Inbox")
store(email["UID"], "LABELS", labels, new_labels)
end
- ImapSyncLog.log("Thread ID #{thread_id} (UID #{uid}) unarchived in Gmail mailbox for #{@username}", :debug)
+ ImapSyncLog.log(
+ "Thread ID #{thread_id} (UID #{uid}) unarchived in Gmail mailbox for #{@username}",
+ :debug,
+ )
end
def thread_id_from_uid(uid)
fetched = imap.uid_fetch(uid, [X_GM_THRID])
- if !fetched
- raise "Thread not found for UID #{uid}!"
- end
+ raise "Thread not found for UID #{uid}!" if !fetched
fetched.last.attr[X_GM_THRID]
end
def emails_in_thread(thread_id)
uids_to_fetch = imap.uid_search("#{X_GM_THRID} #{thread_id}")
- emails(uids_to_fetch, ["UID", "LABELS"])
+ emails(uids_to_fetch, %w[UID LABELS])
end
def trash_move(uid)
thread_id = thread_id_from_uid(uid)
- email_uids_to_trash = emails_in_thread(thread_id).map { |e| e['UID'] }
+ email_uids_to_trash = emails_in_thread(thread_id).map { |e| e["UID"] }
imap.uid_move(email_uids_to_trash, trash_mailbox)
- ImapSyncLog.log("Thread ID #{thread_id} (UID #{uid}) trashed in Gmail mailbox for #{@username}", :debug)
+ ImapSyncLog.log(
+ "Thread ID #{thread_id} (UID #{uid}) trashed in Gmail mailbox for #{@username}",
+ :debug,
+ )
{ trash_uid_validity: open_trash_mailbox, email_uids_to_trash: email_uids_to_trash }
end
@@ -124,16 +126,15 @@ module Imap
# used for the dropdown in the UI where we allow the user to select the
# IMAP mailbox to sync with.
def filter_mailboxes(mailboxes_with_attributes)
- mailboxes_with_attributes.reject do |mb|
- (mb.attr & [:Drafts, :Sent, :Junk, :Flagged, :Trash]).any?
- end.map(&:name)
+ mailboxes_with_attributes
+ .reject { |mb| (mb.attr & %i[Drafts Sent Junk Flagged Trash]).any? }
+ .map(&:name)
end
private
def apply_gmail_patch(imap)
- class << imap.instance_variable_get('@parser')
-
+ class << imap.instance_variable_get("@parser")
# Modified version of the original `msg_att` from here:
# https://github.com/ruby/ruby/blob/1cc8ff001da217d0e98d13fe61fbc9f5547ef722/lib/net/imap.rb#L2346
#
@@ -172,15 +173,14 @@ module Imap
when /\A(?:MODSEQ)\z/ni
name, val = modseq_data
- # Adding support for GMail extended attributes.
+ # Adding support for Gmail extended attributes.
when /\A(?:X-GM-LABELS)\z/ni
name, val = label_data
when /\A(?:X-GM-MSGID)\z/ni
name, val = uid_data
when /\A(?:X-GM-THRID)\z/ni
name, val = uid_data
- # End custom support for Gmail.
-
+ # End custom support for Gmail.
else
parse_error("unknown attribute `%s' for {%d}", token.value, n)
end
diff --git a/lib/imap/sync.rb b/lib/imap/sync.rb
index be5b32fb0d..0d9220131e 100644
--- a/lib/imap/sync.rb
+++ b/lib/imap/sync.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'net/imap'
+require "net/imap"
module Imap
class Sync
@@ -23,13 +23,13 @@ module Imap
end
def can_idle?
- SiteSetting.enable_imap_idle && @provider.can?('IDLE')
+ SiteSetting.enable_imap_idle && @provider.can?("IDLE")
end
def process(idle: false, import_limit: nil, old_emails_limit: nil, new_emails_limit: nil)
- raise 'disconnected' if disconnected?
+ raise "disconnected" if disconnected?
- import_limit ||= SiteSetting.imap_batch_import_email
+ import_limit ||= SiteSetting.imap_batch_import_email
old_emails_limit ||= SiteSetting.imap_polling_old_emails
new_emails_limit ||= SiteSetting.imap_polling_new_emails
@@ -43,30 +43,42 @@ module Imap
# If UID validity changes, the whole mailbox must be synchronized (all
# emails are considered new and will be associated to existent topics
# in Email::Receiver by matching Message-Ids).
- ImapSyncLog.warn("UIDVALIDITY = #{@status[:uid_validity]} does not match expected #{@group.imap_uid_validity}, invalidating IMAP cache and resyncing emails for mailbox #{@group.imap_mailbox_name}", @group)
+ ImapSyncLog.warn(
+ "UIDVALIDITY = #{@status[:uid_validity]} does not match expected #{@group.imap_uid_validity}, invalidating IMAP cache and resyncing emails for mailbox #{@group.imap_mailbox_name}",
+ @group,
+ )
@group.imap_last_uid = 0
end
if idle && !can_idle?
- ImapSyncLog.warn("IMAP server for group cannot IDLE or imap idle site setting is disabled", @group)
+ ImapSyncLog.warn(
+ "IMAP server for group cannot IDLE or imap idle site setting is disabled",
+ @group,
+ )
idle = false
end
if idle
- raise 'IMAP IDLE is disabled' if !SiteSetting.enable_imap_idle
+ raise "IMAP IDLE is disabled" if !SiteSetting.enable_imap_idle
# Thread goes into sleep and it is better to return any connection
# back to the pool.
ActiveRecord::Base.connection_handler.clear_active_connections!
idle_polling_mins = SiteSetting.imap_polling_period_mins.minutes.to_i
- ImapSyncLog.debug("Going IDLE for #{idle_polling_mins} seconds to wait for more work", @group, db: false)
+ ImapSyncLog.debug(
+ "Going IDLE for #{idle_polling_mins} seconds to wait for more work",
+ @group,
+ db: false,
+ )
- @provider.imap.idle(idle_polling_mins) do |resp|
- if resp.kind_of?(Net::IMAP::UntaggedResponse) && resp.name == 'EXISTS'
- @provider.imap.idle_done
+ @provider
+ .imap
+ .idle(idle_polling_mins) do |resp|
+ if resp.kind_of?(Net::IMAP::UntaggedResponse) && resp.name == "EXISTS"
+ @provider.imap.idle_done
+ end
end
- end
end
# Fetching UIDs of old (already imported into Discourse, but might need
@@ -82,7 +94,10 @@ module Imap
# Sometimes, new_uids contains elements from old_uids.
new_uids = new_uids - old_uids
- ImapSyncLog.debug("Remote email server has #{old_uids.size} old emails and #{new_uids.size} new emails", @group)
+ ImapSyncLog.debug(
+ "Remote email server has #{old_uids.size} old emails and #{new_uids.size} new emails",
+ @group,
+ )
all_old_uids_size = old_uids.size
all_new_uids_size = new_uids.size
@@ -90,7 +105,7 @@ module Imap
@group.update_columns(
imap_last_error: nil,
imap_old_emails: all_old_uids_size,
- imap_new_emails: all_new_uids_size
+ imap_new_emails: all_new_uids_size,
)
import_mode = import_limit > -1 && new_uids.size > import_limit
@@ -112,10 +127,10 @@ module Imap
end
def update_topic(email, incoming_email, opts = {})
- return if !incoming_email ||
- incoming_email.imap_sync ||
- !incoming_email.topic ||
- incoming_email.post&.post_number != 1
+ if !incoming_email || incoming_email.imap_sync || !incoming_email.topic ||
+ incoming_email.post&.post_number != 1
+ return
+ end
update_topic_archived_state(email, incoming_email, opts)
update_topic_tags(email, incoming_email, opts)
@@ -125,33 +140,41 @@ module Imap
def process_old_uids(old_uids)
ImapSyncLog.debug("Syncing #{old_uids.size} randomly-selected old emails", @group)
- emails = old_uids.empty? ? [] : @provider.emails(old_uids, ['UID', 'FLAGS', 'LABELS', 'ENVELOPE'])
+ emails = old_uids.empty? ? [] : @provider.emails(old_uids, %w[UID FLAGS LABELS ENVELOPE])
emails.each do |email|
- incoming_email = IncomingEmail.find_by(
- imap_uid_validity: @status[:uid_validity],
- imap_uid: email['UID'],
- imap_group_id: @group.id
- )
+ incoming_email =
+ IncomingEmail.find_by(
+ imap_uid_validity: @status[:uid_validity],
+ imap_uid: email["UID"],
+ imap_group_id: @group.id,
+ )
if incoming_email.present?
update_topic(email, incoming_email, mailbox_name: @group.imap_mailbox_name)
else
# try finding email by message-id instead, we may be able to set the uid etc.
- incoming_email = IncomingEmail.where(
- message_id: Email::MessageIdService.message_id_clean(email['ENVELOPE'].message_id),
- imap_uid: nil,
- imap_uid_validity: nil
- ).where("to_addresses LIKE ?", "%#{@group.email_username}%").first
+ incoming_email =
+ IncomingEmail
+ .where(
+ message_id: Email::MessageIdService.message_id_clean(email["ENVELOPE"].message_id),
+ imap_uid: nil,
+ imap_uid_validity: nil,
+ )
+ .where("to_addresses LIKE ?", "%#{@group.email_username}%")
+ .first
if incoming_email
incoming_email.update(
imap_uid_validity: @status[:uid_validity],
- imap_uid: email['UID'],
- imap_group_id: @group.id
+ imap_uid: email["UID"],
+ imap_group_id: @group.id,
)
update_topic(email, incoming_email, mailbox_name: @group.imap_mailbox_name)
else
- ImapSyncLog.warn("Could not find old email (UIDVALIDITY = #{@status[:uid_validity]}, UID = #{email['UID']})", @group)
+ ImapSyncLog.warn(
+ "Could not find old email (UIDVALIDITY = #{@status[:uid_validity]}, UID = #{email["UID"]})",
+ @group,
+ )
end
end
end
@@ -165,15 +188,18 @@ module Imap
# if they have been deleted and if so delete the associated post/topic. then the remaining we
# can just remove the imap details from the IncomingEmail table and if they end up back in the
# original mailbox then they will be picked up in a future resync.
- existing_incoming = IncomingEmail.includes(:post).where(
- imap_group_id: @group.id, imap_uid_validity: @status[:uid_validity]
- ).where.not(imap_uid: nil)
+ existing_incoming =
+ IncomingEmail
+ .includes(:post)
+ .where(imap_group_id: @group.id, imap_uid_validity: @status[:uid_validity])
+ .where.not(imap_uid: nil)
existing_uids = existing_incoming.map(&:imap_uid)
missing_uids = existing_uids - old_uids
- missing_message_ids = existing_incoming.select do |incoming|
- missing_uids.include?(incoming.imap_uid)
- end.map(&:message_id)
+ missing_message_ids =
+ existing_incoming
+ .select { |incoming| missing_uids.include?(incoming.imap_uid) }
+ .map(&:message_id)
return if missing_message_ids.empty?
@@ -183,7 +209,8 @@ module Imap
potential_spam = []
response = @provider.find_trashed_by_message_ids(missing_message_ids)
existing_incoming.each do |incoming|
- matching_trashed = response.trashed_emails.find { |email| email.message_id == incoming.message_id }
+ matching_trashed =
+ response.trashed_emails.find { |email| email.message_id == incoming.message_id }
if !matching_trashed
potential_spam << incoming
@@ -194,13 +221,22 @@ module Imap
# not exist, and this sync is just updating the old UIDs to the new ones
# in the trash, and we don't need to re-destroy the post
if incoming.post
- ImapSyncLog.debug("Deleting post ID #{incoming.post_id}, topic id #{incoming.topic_id}; email has been deleted on the IMAP server.", @group)
+ ImapSyncLog.debug(
+ "Deleting post ID #{incoming.post_id}, topic id #{incoming.topic_id}; email has been deleted on the IMAP server.",
+ @group,
+ )
PostDestroyer.new(Discourse.system_user, incoming.post).destroy
end
# the email has moved mailboxes, we don't want to try trashing again next time
- ImapSyncLog.debug("Updating incoming ID #{incoming.id} uid data FROM [UID #{incoming.imap_uid} | UIDVALIDITY #{incoming.imap_uid_validity}] TO [UID #{matching_trashed.uid} | UIDVALIDITY #{response.trash_uid_validity}] (TRASHED)", @group)
- incoming.update(imap_uid_validity: response.trash_uid_validity, imap_uid: matching_trashed.uid)
+ ImapSyncLog.debug(
+ "Updating incoming ID #{incoming.id} uid data FROM [UID #{incoming.imap_uid} | UIDVALIDITY #{incoming.imap_uid_validity}] TO [UID #{matching_trashed.uid} | UIDVALIDITY #{response.trash_uid_validity}] (TRASHED)",
+ @group,
+ )
+ incoming.update(
+ imap_uid_validity: response.trash_uid_validity,
+ imap_uid: matching_trashed.uid,
+ )
end
# This can be done because Message-ID is unique on a mail server between mailboxes,
@@ -208,12 +244,16 @@ module Imap
# the new UID from the spam.
response = @provider.find_spam_by_message_ids(missing_message_ids)
potential_spam.each do |incoming|
- matching_spam = response.spam_emails.find { |email| email.message_id == incoming.message_id }
+ matching_spam =
+ response.spam_emails.find { |email| email.message_id == incoming.message_id }
# if the email is not in the trash or spam then we don't know where it is... could
# be in any mailbox on the server or could be permanently deleted.
if !matching_spam
- ImapSyncLog.debug("Email for incoming ID #{incoming.id} (#{incoming.message_id}) could not be found in the group mailbox, trash, or spam. It could be in another mailbox or permanently deleted.", @group)
+ ImapSyncLog.debug(
+ "Email for incoming ID #{incoming.id} (#{incoming.message_id}) could not be found in the group mailbox, trash, or spam. It could be in another mailbox or permanently deleted.",
+ @group,
+ )
incoming.update(imap_missing: true)
next
end
@@ -222,12 +262,18 @@ module Imap
# not exist, and this sync is just updating the old UIDs to the new ones
# in the spam, and we don't need to re-destroy the post
if incoming.post
- ImapSyncLog.debug("Deleting post ID #{incoming.post_id}, topic id #{incoming.topic_id}; email has been moved to spam on the IMAP server.", @group)
+ ImapSyncLog.debug(
+ "Deleting post ID #{incoming.post_id}, topic id #{incoming.topic_id}; email has been moved to spam on the IMAP server.",
+ @group,
+ )
PostDestroyer.new(Discourse.system_user, incoming.post).destroy
end
# the email has moved mailboxes, we don't want to try marking as spam again next time
- ImapSyncLog.debug("Updating incoming ID #{incoming.id} uid data FROM [UID #{incoming.imap_uid} | UIDVALIDITY #{incoming.imap_uid_validity}] TO [UID #{matching_spam.uid} | UIDVALIDITY #{response.spam_uid_validity}] (SPAM)", @group)
+ ImapSyncLog.debug(
+ "Updating incoming ID #{incoming.id} uid data FROM [UID #{incoming.imap_uid} | UIDVALIDITY #{incoming.imap_uid_validity}] TO [UID #{matching_spam.uid} | UIDVALIDITY #{response.spam_uid_validity}] (SPAM)",
+ @group,
+ )
incoming.update(imap_uid_validity: response.spam_uid_validity, imap_uid: matching_spam.uid)
end
end
@@ -235,7 +281,7 @@ module Imap
def process_new_uids(new_uids, import_mode, all_old_uids_size, all_new_uids_size)
ImapSyncLog.debug("Syncing #{new_uids.size} new emails (oldest first)", @group)
- emails = @provider.emails(new_uids, ['UID', 'FLAGS', 'LABELS', 'RFC822'])
+ emails = @provider.emails(new_uids, %w[UID FLAGS LABELS RFC822])
processed = 0
# TODO (maybe): We might need something here to exclusively handle
@@ -247,29 +293,33 @@ module Imap
# (for example replies must be processed after the original email
# to have a topic where the reply can be posted).
begin
- receiver = Email::Receiver.new(
- email['RFC822'],
- allow_auto_generated: true,
- import_mode: import_mode,
- destinations: [@group],
- imap_uid_validity: @status[:uid_validity],
- imap_uid: email['UID'],
- imap_group_id: @group.id,
- source: :imap
- )
+ receiver =
+ Email::Receiver.new(
+ email["RFC822"],
+ allow_auto_generated: true,
+ import_mode: import_mode,
+ destinations: [@group],
+ imap_uid_validity: @status[:uid_validity],
+ imap_uid: email["UID"],
+ imap_group_id: @group.id,
+ source: :imap,
+ )
receiver.process!
update_topic(email, receiver.incoming_email, mailbox_name: @group.imap_mailbox_name)
rescue Email::Receiver::ProcessingError => e
- ImapSyncLog.warn("Could not process (UIDVALIDITY = #{@status[:uid_validity]}, UID = #{email['UID']}): #{e.message}", @group)
+ ImapSyncLog.warn(
+ "Could not process (UIDVALIDITY = #{@status[:uid_validity]}, UID = #{email["UID"]}): #{e.message}",
+ @group,
+ )
end
processed += 1
@group.update_columns(
imap_uid_validity: @status[:uid_validity],
- imap_last_uid: email['UID'],
+ imap_last_uid: email["UID"],
imap_old_emails: all_old_uids_size + processed,
- imap_new_emails: all_new_uids_size - processed
+ imap_new_emails: all_new_uids_size - processed,
)
end
end
@@ -281,7 +331,10 @@ module Imap
if to_sync.size > 0
@provider.open_mailbox(@group.imap_mailbox_name, write: true)
to_sync.each do |incoming_email|
- ImapSyncLog.debug("Updating email on IMAP server for incoming email ID = #{incoming_email.id}, UID = #{incoming_email.imap_uid}", @group)
+ ImapSyncLog.debug(
+ "Updating email on IMAP server for incoming email ID = #{incoming_email.id}, UID = #{incoming_email.imap_uid}",
+ @group,
+ )
update_email(incoming_email)
incoming_email.update(imap_sync: false)
end
@@ -292,7 +345,7 @@ module Imap
topic = incoming_email.topic
topic_is_archived = topic.group_archived_messages.size > 0
- email_is_archived = !email['LABELS'].include?('\\Inbox') && !email['LABELS'].include?('INBOX')
+ email_is_archived = !email["LABELS"].include?('\\Inbox') && !email["LABELS"].include?("INBOX")
if topic_is_archived && !email_is_archived
ImapSyncLog.debug("Unarchiving topic ID #{topic.id}, email was unarchived", @group)
@@ -322,10 +375,10 @@ module Imap
tags.add(@provider.to_tag(opts[:mailbox_name])) if opts[:mailbox_name]
# Flags and labels
- email['FLAGS'].each { |flag| tags.add(@provider.to_tag(flag)) }
- email['LABELS'].each { |label| tags.add(@provider.to_tag(label)) }
+ email["FLAGS"].each { |flag| tags.add(@provider.to_tag(flag)) }
+ email["LABELS"].each { |label| tags.add(@provider.to_tag(label)) }
- tags.subtract([nil, ''])
+ tags.subtract([nil, ""])
return if !tagging_enabled?
@@ -354,11 +407,11 @@ module Imap
#
# A) the email has been deleted/moved to a different mailbox in the provider
# B) the UID does not belong to the provider
- email = @provider.emails(incoming_email.imap_uid, ['FLAGS', 'LABELS']).first
+ email = @provider.emails(incoming_email.imap_uid, %w[FLAGS LABELS]).first
return if !email.present?
- labels = email['LABELS']
- flags = email['FLAGS']
+ labels = email["LABELS"]
+ flags = email["FLAGS"]
new_labels = []
new_flags = []
@@ -367,7 +420,10 @@ module Imap
if !topic
# no need to do anything further here, we will recognize the UIDs in the
# mail server email thread have been trashed on next sync
- ImapSyncLog.debug("Trashing UID #{incoming_email.imap_uid} (incoming ID #{incoming_email.id})", @group)
+ ImapSyncLog.debug(
+ "Trashing UID #{incoming_email.imap_uid} (incoming ID #{incoming_email.id})",
+ @group,
+ )
return @provider.trash(incoming_email.imap_uid)
end
@@ -380,12 +436,18 @@ module Imap
# at the same time.
new_labels << "\\Inbox"
- ImapSyncLog.debug("Unarchiving UID #{incoming_email.imap_uid} (incoming ID #{incoming_email.id})", @group)
+ ImapSyncLog.debug(
+ "Unarchiving UID #{incoming_email.imap_uid} (incoming ID #{incoming_email.id})",
+ @group,
+ )
# some providers need special handling for unarchiving too
@provider.unarchive(incoming_email.imap_uid)
else
- ImapSyncLog.debug("Archiving UID #{incoming_email.imap_uid} (incoming ID #{incoming_email.id})", @group)
+ ImapSyncLog.debug(
+ "Archiving UID #{incoming_email.imap_uid} (incoming ID #{incoming_email.id})",
+ @group,
+ )
# some providers need special handling for archiving. this way we preserve
# any new tag-labels, and archive, even though it may cause extra requests
@@ -397,13 +459,14 @@ module Imap
if tagging_enabled?
tags = topic.tags.pluck(:name)
new_flags = tags.map { |tag| @provider.tag_to_flag(tag) }.reject(&:blank?)
- new_labels = new_labels.concat(tags.map { |tag| @provider.tag_to_label(tag) }.reject(&:blank?))
+ new_labels =
+ new_labels.concat(tags.map { |tag| @provider.tag_to_label(tag) }.reject(&:blank?))
end
# regardless of whether the topic needs to be archived we still update
# the flags and the labels
- @provider.store(incoming_email.imap_uid, 'FLAGS', flags, new_flags)
- @provider.store(incoming_email.imap_uid, 'LABELS', labels, new_labels)
+ @provider.store(incoming_email.imap_uid, "FLAGS", flags, new_flags)
+ @provider.store(incoming_email.imap_uid, "LABELS", labels, new_labels)
end
def tagging_enabled?
diff --git a/lib/import/normalize.rb b/lib/import/normalize.rb
index c9c6dc7499..8b9b98b8e0 100644
--- a/lib/import/normalize.rb
+++ b/lib/import/normalize.rb
@@ -3,13 +3,14 @@
# markdown normalizer to be used by importers
#
#
-require 'htmlentities'
-module Import; end
+require "htmlentities"
+module Import
+end
module Import::Normalize
def self.normalize_code_blocks(code, lang = nil)
coder = HTMLEntities.new
- code.gsub(/