From 186379adac60048264d75c98f42da4b8c7d02fe2 Mon Sep 17 00:00:00 2001
From: Bianca Nenciu
Date: Wed, 17 Nov 2021 18:59:44 +0200
Subject: [PATCH 001/168] FIX: Cache all watched words (#14992)

It used to cache up to 1000 words, but the maximum number of watched words is 2000.
---
 app/services/word_watcher.rb | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/app/services/word_watcher.rb b/app/services/word_watcher.rb
index 1927ac9474..91d795a9d9 100644
--- a/app/services/word_watcher.rb
+++ b/app/services/word_watcher.rb
@@ -8,7 +8,10 @@ class WordWatcher
   end
 
   def self.words_for_action(action)
-    words = WatchedWord.where(action: WatchedWord.actions[action.to_sym]).limit(1000)
+    words = WatchedWord
+      .where(action: WatchedWord.actions[action.to_sym])
+      .limit(WatchedWord::MAX_WORDS_PER_ACTION)
+
     if WatchedWord.has_replacement?(action.to_sym)
       words.pluck(:word, :replacement).to_h
     else

From 05423e9dfd77a05c7d9062aa7a5aeba8756d01f0 Mon Sep 17 00:00:00 2001
From: Jarek Radosz
Date: Wed, 17 Nov 2021 20:52:22 +0100
Subject: [PATCH 002/168] DEV: `I18n` global is no longer supported (#14993)

Don't allow it when linting.
---
 lib/tasks/docker.rake | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/lib/tasks/docker.rake b/lib/tasks/docker.rake
index 1e712e53b9..30f8675e9c 100644
--- a/lib/tasks/docker.rake
+++ b/lib/tasks/docker.rake
@@ -70,7 +70,7 @@ task 'docker:test' do
   if ENV["SINGLE_PLUGIN"]
     @good &&= run_or_fail("bundle exec rubocop --parallel plugins/#{ENV["SINGLE_PLUGIN"]}")
     @good &&= run_or_fail("bundle exec ruby script/i18n_lint.rb plugins/#{ENV["SINGLE_PLUGIN"]}/config/locales/{client,server}.en.yml")
-    @good &&= run_or_fail("yarn eslint --global I18n --ext .js,.js.es6 --no-error-on-unmatched-pattern plugins/#{ENV['SINGLE_PLUGIN']}")
+    @good &&= run_or_fail("yarn eslint --ext .js,.js.es6 --no-error-on-unmatched-pattern plugins/#{ENV['SINGLE_PLUGIN']}")
 
     puts "Listing prettier offenses in #{ENV['SINGLE_PLUGIN']}:"
     @good &&= run_or_fail_prettier("plugins/#{ENV['SINGLE_PLUGIN']}/**/*.scss", "plugins/#{ENV['SINGLE_PLUGIN']}/**/*.{js,es6}")
@@ -78,9 +78,7 @@ task 'docker:test' do
   @good &&= run_or_fail("bundle exec rake plugin:update_all") unless ENV["SKIP_PLUGINS"]
   @good &&= run_or_fail("bundle exec rubocop --parallel") unless ENV["SKIP_CORE"]
   @good &&= run_or_fail("yarn eslint app/assets/javascripts") unless ENV["SKIP_CORE"]
-
-  # TODO: remove --global I18n once plugins can be updated
-  @good &&= run_or_fail("yarn eslint --global I18n --ext .js,.js.es6 --no-error-on-unmatched-pattern plugins") unless ENV["SKIP_PLUGINS"]
+  @good &&= run_or_fail("yarn eslint --ext .js,.js.es6 --no-error-on-unmatched-pattern plugins") unless ENV["SKIP_PLUGINS"]
 
   @good &&= run_or_fail('bundle exec ruby script/i18n_lint.rb "config/locales/{client,server}.en.yml"') unless ENV["SKIP_CORE"]
   @good &&= run_or_fail('bundle exec ruby script/i18n_lint.rb "plugins/**/locales/{client,server}.en.yml"') unless ENV["SKIP_PLUGINS"]

From 9be69b603c049014e40e341fbddfb8c73344c703 Mon Sep 17 00:00:00 2001
From: Jarek Radosz
Date: Wed, 17 Nov 2021 20:56:06 +0100
Subject: [PATCH 003/168] DEV: Make `screen-track` a regular service (#14983)

---
 app/assets/javascripts/application.js | 1 -
 .../inject-discourse-objects.js | 15 ++-------
 .../app/{lib => services}/screen-track.js | 32 +++++++++++--------
 .../tests/unit/controllers/topic-test.js | 3 +-
 .../{lib => services}/screen-track-test.js | 8 ++---
 5 files changed, 25 insertions(+), 34 deletions(-)
 rename app/assets/javascripts/discourse/app/{lib =>
services}/screen-track.js (95%) rename app/assets/javascripts/discourse/tests/unit/{lib => services}/screen-track-test.js (68%) diff --git a/app/assets/javascripts/application.js b/app/assets/javascripts/application.js index 6ddaadf6db..dbaf4acf87 100644 --- a/app/assets/javascripts/application.js +++ b/app/assets/javascripts/application.js @@ -74,7 +74,6 @@ //= require ./discourse/app/lib/link-mentions //= require ./discourse/app/components/site-header //= require ./discourse/app/components/d-editor -//= require ./discourse/app/lib/screen-track //= require ./discourse/app/routes/discourse //= require ./discourse/app/routes/build-topic-route //= require ./discourse/app/routes/restricted-user diff --git a/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js b/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js index 576fbc589e..5fa2e380ba 100644 --- a/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js +++ b/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js @@ -5,7 +5,6 @@ import PrivateMessageTopicTrackingState from "discourse/models/private-message-t import DiscourseLocation from "discourse/lib/discourse-location"; import KeyValueStore from "discourse/lib/key-value-store"; import MessageBus from "message-bus-client"; -import ScreenTrack from "discourse/lib/screen-track"; import SearchService from "discourse/services/search"; import Session from "discourse/models/session"; import Site from "discourse/models/site"; @@ -69,16 +68,6 @@ export default { const session = Session.current(); app.register("session:main", session, { instantiate: false }); - // TODO: Automatically register this service - const screenTrack = new ScreenTrack( - topicTrackingState, - siteSettings, - session, - currentUser, - container.lookup("service:app-events") - ); - app.register("service:screen-track", screenTrack, { instantiate: false }); - app.register("location:discourse-location", DiscourseLocation); const keyValueStore = new KeyValueStore("discourse_"); @@ -87,7 +76,6 @@ export default { ALL_TARGETS.forEach((t) => { app.inject(t, "appEvents", "service:app-events"); - app.inject(t, "topicTrackingState", "topic-tracking-state:main"); app.inject(t, "pmTopicTrackingState", "pm-topic-tracking-state:main"); app.inject(t, "store", "service:store"); app.inject(t, "site", "site:main"); @@ -99,10 +87,11 @@ export default { app.inject(t, "session", "session:main"); app.inject(t, "messageBus", "message-bus:main"); app.inject(t, "siteSettings", "site-settings:main"); + app.inject(t, "topicTrackingState", "topic-tracking-state:main"); }); if (currentUser) { - ["component", "route", "controller", "service"].forEach((t) => { + ["controller", "component", "route", "service"].forEach((t) => { app.inject(t, "currentUser", "current-user:main"); }); } diff --git a/app/assets/javascripts/discourse/app/lib/screen-track.js b/app/assets/javascripts/discourse/app/services/screen-track.js similarity index 95% rename from app/assets/javascripts/discourse/app/lib/screen-track.js rename to app/assets/javascripts/discourse/app/services/screen-track.js index 5988a33300..647c3995f2 100644 --- a/app/assets/javascripts/discourse/app/lib/screen-track.js +++ b/app/assets/javascripts/discourse/app/services/screen-track.js @@ -1,3 +1,4 @@ +import Service, { inject as service } from "@ember/service"; import { ajax } from "discourse/lib/ajax"; import { bind } from "discourse-common/utils/decorators"; import { isTesting } from 
"discourse-common/config/environment"; @@ -10,21 +11,24 @@ const ANON_MAX_TOPIC_IDS = 5; const AJAX_FAILURE_DELAYS = [5000, 10000, 20000, 40000]; const ALLOWED_AJAX_FAILURES = [405, 429, 500, 501, 502, 503, 504]; -export default class { - constructor( - topicTrackingState, - siteSettings, - session, - currentUser, - appEvents - ) { - this.topicTrackingState = topicTrackingState; - this.siteSettings = siteSettings; - this.session = session; - this.currentUser = currentUser; - this.appEvents = appEvents; +export default class ScreenTrack extends Service { + @service appEvents; + + _consolidatedTimings = []; + _lastTick = null; + _lastScrolled = null; + _lastFlush = 0; + _timings = {}; + _totalTimings = {}; + _topicTime = 0; + _onscreen = []; + _readOnscreen = []; + _readPosts = {}; + _inProgress = false; + + constructor() { + super(...arguments); this.reset(); - this._consolidatedTimings = []; } start(topicId, topicController) { diff --git a/app/assets/javascripts/discourse/tests/unit/controllers/topic-test.js b/app/assets/javascripts/discourse/tests/unit/controllers/topic-test.js index 7da6519769..bd8badbf58 100644 --- a/app/assets/javascripts/discourse/tests/unit/controllers/topic-test.js +++ b/app/assets/javascripts/discourse/tests/unit/controllers/topic-test.js @@ -17,11 +17,10 @@ function topicWithStream(streamDetails) { discourseModule("Unit | Controller | topic", function (hooks) { hooks.beforeEach(function () { - this.registry.register("service:screen-track", {}, { instantiate: false }); this.registry.injection("controller", "appEvents", "service:app-events"); }); + hooks.afterEach(function () { - this.registry.unregister("service:screen-track"); this.registry.unregister("current-user:main"); let topic = this.container.lookup("controller:topic"); topic.setProperties({ diff --git a/app/assets/javascripts/discourse/tests/unit/lib/screen-track-test.js b/app/assets/javascripts/discourse/tests/unit/services/screen-track-test.js similarity index 68% rename from app/assets/javascripts/discourse/tests/unit/lib/screen-track-test.js rename to app/assets/javascripts/discourse/tests/unit/services/screen-track-test.js index b1a557811d..7eeceb4b26 100644 --- a/app/assets/javascripts/discourse/tests/unit/lib/screen-track-test.js +++ b/app/assets/javascripts/discourse/tests/unit/services/screen-track-test.js @@ -1,9 +1,9 @@ -import { module, test } from "qunit"; -import ScreenTrack from "discourse/lib/screen-track"; +import { discourseModule } from "discourse/tests/helpers/qunit-helpers"; +import { test } from "qunit"; -module("Unit | Utility | screen-track", function () { +discourseModule("Unit | Service | screen-track", function () { test("consolidateTimings", function (assert) { - const tracker = new ScreenTrack(); + const tracker = this.container.lookup("service:screen-track"); tracker.consolidateTimings({ 1: 10, 2: 5 }, 10, 1); tracker.consolidateTimings({ 1: 5, 3: 1 }, 3, 1); From b86127ad123ce4eede7bc861134f32d9312128d2 Mon Sep 17 00:00:00 2001 From: Osama Sayegh Date: Wed, 17 Nov 2021 23:27:30 +0300 Subject: [PATCH 004/168] FEATURE: Apply rate limits per user instead of IP for trusted users (#14706) Currently, Discourse rate limits all incoming requests by the IP address they originate from regardless of the user making the request. This can be frustrating if there are multiple users using Discourse simultaneously while sharing the same IP address (e.g. employees in an office). 
This commit implements a new feature that makes Discourse apply rate limits by user id rather than by IP address for users at or above the configured trust level (1 is the default).

For example, let's say a Discourse instance is configured to allow 200 requests per minute per IP address, and 10 users at trust level 4 are using Discourse simultaneously from the same IP address. Before this feature, those 10 users could only make a combined total of 200 requests per minute before they got rate limited. With the new feature, each user is allowed to make 200 requests per minute, because the rate limits are applied per user id rather than per IP address.

The minimum trust level for applying user-id-based rate limits can be configured via the `skip_per_ip_rate_limit_trust_level` global setting. The default is 1, but it can be changed either by adding the `DISCOURSE_SKIP_PER_IP_RATE_LIMIT_TRUST_LEVEL` environment variable with the desired value to your `app.yml`, or by changing the setting's value in the `discourse.conf` file.

Requests made with API keys are still rate limited by IP address and by the relevant global settings that control API key rate limits.

Before this commit, Discourse's auth cookie (`_t`) was simply a 32-character string that Discourse used to look up the current user in the database; the cookie contained no additional information about the user. This commit changes the cookie content so that the user can be identified from the cookie without a database query before the rate-limiting logic runs, avoiding a bottleneck on busy sites. Besides the 32-character auth token, the cookie now includes the user id, the trust level, and the cookie's generation date, and the cookie is encrypted and signed to prevent tampering.

Internal ticket number: t54739.
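The application_controller.rb and request_tracker.rb changes below also add a `Discourse-Rate-Limit-Error-Code` header alongside `Retry-After` on rate-limited (429) responses. As a rough illustration only (not part of this patch), a client could react to those headers like the sketch below; the URL is a placeholder:

    require "net/http"
    require "uri"

    # Placeholder URL; any rate-limited Discourse route responds the same way.
    uri = URI("https://discourse.example.com/latest.json")
    response = Net::HTTP.get_response(uri)

    if response.code.to_i == 429
      # Both headers are added by the diffs below for rate-limited responses.
      error_code = response["Discourse-Rate-Limit-Error-Code"]
      wait_seconds = response["Retry-After"].to_i
      puts "Rate limited (#{error_code}); retrying in #{wait_seconds}s"
      sleep(wait_seconds)
    end
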
--- app/controllers/application_controller.rb | 10 +- app/models/user_api_key.rb | 18 ++ app/models/user_auth_token.rb | 3 +- config/discourse_defaults.conf | 3 + lib/admin_constraint.rb | 6 +- lib/auth/current_user_provider.rb | 6 +- lib/auth/default_current_user_provider.rb | 232 ++++++++++------- lib/guardian.rb | 11 +- lib/homepage_constraint.rb | 4 +- lib/middleware/anonymous_cache.rb | 4 +- lib/middleware/request_tracker.rb | 234 ++++++++++-------- lib/rate_limiter.rb | 9 +- lib/rate_limiter/limit_exceeded.rb | 5 +- lib/staff_constraint.rb | 6 +- .../default_current_user_provider_spec.rb | 188 ++++++++++---- spec/components/current_user_spec.rb | 11 +- spec/components/guardian_spec.rb | 19 +- spec/components/hijack_spec.rb | 2 +- .../middleware/anonymous_cache_spec.rb | 27 +- .../middleware/request_tracker_spec.rb | 188 +++++++++++++- spec/jobs/export_user_archive_spec.rb | 12 +- spec/multisite/request_tracker_spec.rb | 142 +++++++++++ spec/rails_helper.rb | 38 +++ spec/requests/application_controller_spec.rb | 63 +++++ spec/requests/session_controller_spec.rb | 9 +- spec/requests/users_controller_spec.rb | 13 +- spec/support/helpers.rb | 3 +- 27 files changed, 973 insertions(+), 293 deletions(-) create mode 100644 spec/multisite/request_tracker_spec.rb diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb index be5fd3b01f..0561be4c82 100644 --- a/app/controllers/application_controller.rb +++ b/app/controllers/application_controller.rb @@ -188,13 +188,21 @@ class ApplicationController < ActionController::Base rescue_from RateLimiter::LimitExceeded do |e| retry_time_in_seconds = e&.available_in + response_headers = { + 'Retry-After': retry_time_in_seconds.to_s + } + + if e&.error_code + response_headers['Discourse-Rate-Limit-Error-Code'] = e.error_code + end + with_resolved_locale do render_json_error( e.description, type: :rate_limit, status: 429, extras: { wait_seconds: retry_time_in_seconds }, - headers: { 'Retry-After': retry_time_in_seconds.to_s } + headers: response_headers ) end end diff --git a/app/models/user_api_key.rb b/app/models/user_api_key.rb index 1c708846b1..d0238781bc 100644 --- a/app/models/user_api_key.rb +++ b/app/models/user_api_key.rb @@ -31,6 +31,24 @@ class UserApiKey < ActiveRecord::Base @key.present? end + def ensure_allowed!(env) + raise Discourse::InvalidAccess.new if !allow?(env) + end + + def update_last_used(client_id) + update_args = { last_used_at: Time.zone.now } + if client_id.present? 
&& client_id != self.client_id + # invalidate old dupe api key for client if needed + UserApiKey + .where(client_id: client_id, user_id: self.user_id) + .where('id <> ?', self.id) + .destroy_all + + update_args[:client_id] = client_id + end + self.update_columns(**update_args) + end + # Scopes allowed to be requested by external services def self.allowed_scopes Set.new(SiteSetting.allow_user_api_key_scopes.split("|")) diff --git a/app/models/user_auth_token.rb b/app/models/user_auth_token.rb index 5cb107cf7a..6566371b20 100644 --- a/app/models/user_auth_token.rb +++ b/app/models/user_auth_token.rb @@ -4,7 +4,8 @@ require 'digest/sha1' class UserAuthToken < ActiveRecord::Base belongs_to :user - ROTATE_TIME = 10.minutes + ROTATE_TIME_MINS = 10 + ROTATE_TIME = ROTATE_TIME_MINS.minutes # used when token did not arrive at client URGENT_ROTATE_TIME = 1.minute diff --git a/config/discourse_defaults.conf b/config/discourse_defaults.conf index f0d34270d0..293ff29393 100644 --- a/config/discourse_defaults.conf +++ b/config/discourse_defaults.conf @@ -239,6 +239,9 @@ max_reqs_per_ip_mode = block # bypass rate limiting any IP resolved as a private IP max_reqs_rate_limit_on_private = false +# use per user rate limits vs ip rate limits for users with this trust level or more. +skip_per_ip_rate_limit_trust_level = 1 + # logged in DoS protection # protection will only trigger for requests that queue longer than this amount diff --git a/lib/admin_constraint.rb b/lib/admin_constraint.rb index 2fd16b90b3..7146eedfee 100644 --- a/lib/admin_constraint.rb +++ b/lib/admin_constraint.rb @@ -8,10 +8,8 @@ class AdminConstraint def matches?(request) return false if @require_master && RailsMultisite::ConnectionManagement.current_db != "default" - provider = Discourse.current_user_provider.new(request.env) - provider.current_user && - provider.current_user.admin? && - custom_admin_check(request) + current_user = CurrentUser.lookup_from_env(request.env) + current_user&.admin? && custom_admin_check(request) rescue Discourse::InvalidAccess, Discourse::ReadOnly false end diff --git a/lib/auth/current_user_provider.rb b/lib/auth/current_user_provider.rb index 479a44d57d..31880b6dc6 100644 --- a/lib/auth/current_user_provider.rb +++ b/lib/auth/current_user_provider.rb @@ -14,12 +14,12 @@ class Auth::CurrentUserProvider end # log on a user and set cookies and session etc. 
- def log_on_user(user, session, cookies, opts = {}) + def log_on_user(user, session, cookie_jar, opts = {}) raise NotImplementedError end # optional interface to be called to refresh cookies etc if needed - def refresh_session(user, session, cookies) + def refresh_session(user, session, cookie_jar) end # api has special rights return true if api was detected @@ -37,7 +37,7 @@ class Auth::CurrentUserProvider raise NotImplementedError end - def log_off_user(session, cookies) + def log_off_user(session, cookie_jar) raise NotImplementedError end end diff --git a/lib/auth/default_current_user_provider.rb b/lib/auth/default_current_user_provider.rb index a35d40adda..4f369d9240 100644 --- a/lib/auth/default_current_user_provider.rb +++ b/lib/auth/default_current_user_provider.rb @@ -1,6 +1,27 @@ # frozen_string_literal: true require_relative '../route_matcher' +# You may have seen references to v0 and v1 of our auth cookie in the codebase +# and you're not sure how they differ, so here is an explanation: +# +# From the very early days of Discourse, the auth cookie (_t) consisted only of +# a 32 characters random string that Discourse used to identify/lookup the +# current user. We didn't include any metadata with the cookie or encrypt/sign +# it. +# +# That was v0 of the auth cookie until Nov 2021 when we merged a change that +# required us to store additional metadata with the cookie so we could get more +# information about current user early in the request lifecycle before we +# performed database lookup. We also started encrypting and signing the cookie +# to prevent tampering and obfuscate user information that we include in the +# cookie. This is v1 of our auth cookie and we still use it to this date. +# +# We still accept v0 of the auth cookie to keep users logged in, but upon +# cookie rotation (which happen every 10 minutes) they'll be switched over to +# the v1 format. +# +# We'll drop support for v0 after Discourse 2.9 is released. + class Auth::DefaultCurrentUserProvider CURRENT_USER_KEY ||= "_DISCOURSE_CURRENT_USER" @@ -19,6 +40,9 @@ class Auth::DefaultCurrentUserProvider PATH_INFO ||= "PATH_INFO" COOKIE_ATTEMPTS_PER_MIN ||= 10 BAD_TOKEN ||= "_DISCOURSE_BAD_TOKEN" + DECRYPTED_AUTH_COOKIE = "_DISCOURSE_DECRYPTED_AUTH_COOKIE" + + TOKEN_SIZE = 32 PARAMETER_API_PATTERNS ||= [ RouteMatcher.new( @@ -52,6 +76,25 @@ class Auth::DefaultCurrentUserProvider ), ] + def self.find_v0_auth_cookie(request) + cookie = request.cookies[TOKEN_COOKIE].presence + if cookie && cookie.size == TOKEN_SIZE + cookie + end + end + + def self.find_v1_auth_cookie(env) + return env[DECRYPTED_AUTH_COOKIE] if env.key?(DECRYPTED_AUTH_COOKIE) + + env[DECRYPTED_AUTH_COOKIE] = begin + request = ActionDispatch::Request.new(env) + # don't even initialize a cookie jar if we don't have a cookie at all + if request.cookies[TOKEN_COOKIE].present? + request.cookie_jar.encrypted[TOKEN_COOKIE] + end + end + end + # do all current user initialization here def initialize(env) @env = env @@ -86,11 +129,10 @@ class Auth::DefaultCurrentUserProvider api_key ||= request[API_KEY] end - auth_token = request.cookies[TOKEN_COOKIE] unless user_api_key || api_key - + auth_token = find_auth_token current_user = nil - if auth_token && auth_token.length == 32 + if auth_token limiter = RateLimiter.new(nil, "cookie_auth_#{request.ip}", COOKIE_ATTEMPTS_PER_MIN , 60) if limiter.can_perform? 
@@ -128,33 +170,42 @@ class Auth::DefaultCurrentUserProvider # possible we have an api call, impersonate if api_key current_user = lookup_api_user(api_key, request) - raise Discourse::InvalidAccess.new(I18n.t('invalid_api_credentials'), nil, custom_message: "invalid_api_credentials") unless current_user + if !current_user + raise Discourse::InvalidAccess.new( + I18n.t('invalid_api_credentials'), + nil, + custom_message: "invalid_api_credentials" + ) + end raise Discourse::InvalidAccess if current_user.suspended? || !current_user.active + admin_api_key_limiter.performed! if !Rails.env.profile? @env[API_KEY_ENV] = true - rate_limit_admin_api_requests! end # user api key handling if user_api_key + @hashed_user_api_key = ApiKey.hash_key(user_api_key) - hashed_user_api_key = ApiKey.hash_key(user_api_key) - limiter_min = RateLimiter.new(nil, "user_api_min_#{hashed_user_api_key}", GlobalSetting.max_user_api_reqs_per_minute, 60) - limiter_day = RateLimiter.new(nil, "user_api_day_#{hashed_user_api_key}", GlobalSetting.max_user_api_reqs_per_day, 86400) + user_api_key_obj = UserApiKey + .active + .joins(:user) + .where(key_hash: @hashed_user_api_key) + .includes(:user, :scopes) + .first - unless limiter_day.can_perform? - limiter_day.performed! - end + raise Discourse::InvalidAccess unless user_api_key_obj - unless limiter_min.can_perform? - limiter_min.performed! - end + user_api_key_limiter_60_secs.performed! + user_api_key_limiter_1_day.performed! - current_user = lookup_user_api_user_and_update_key(user_api_key, @env[USER_API_CLIENT_ID]) - raise Discourse::InvalidAccess unless current_user + user_api_key_obj.ensure_allowed!(@env) + + current_user = user_api_key_obj.user raise Discourse::InvalidAccess if current_user.suspended? || !current_user.active - limiter_min.performed! - limiter_day.performed! + if can_write? + user_api_key_obj.update_last_used(@env[USER_API_CLIENT_ID]) + end @env[USER_API_KEY_ENV] = true end @@ -178,7 +229,7 @@ class Auth::DefaultCurrentUserProvider @env[CURRENT_USER_KEY] = current_user end - def refresh_session(user, session, cookies) + def refresh_session(user, session, cookie_jar) # if user was not loaded, no point refreshing session # it could be an anonymous path, this would add cost return if is_api? || !@env.key?(CURRENT_USER_KEY) @@ -192,18 +243,18 @@ class Auth::DefaultCurrentUserProvider if @user_token.rotate!(user_agent: @env['HTTP_USER_AGENT'], client_ip: @request.ip, path: @env['REQUEST_PATH']) - cookies[TOKEN_COOKIE] = cookie_hash(@user_token.unhashed_auth_token) + set_auth_cookie!(@user_token.unhashed_auth_token, user, cookie_jar) DiscourseEvent.trigger(:user_session_refreshed, user) end end end - if !user && cookies.key?(TOKEN_COOKIE) - cookies.delete(TOKEN_COOKIE) + if !user && cookie_jar.key?(TOKEN_COOKIE) + cookie_jar.delete(TOKEN_COOKIE) end end - def log_on_user(user, session, cookies, opts = {}) + def log_on_user(user, session, cookie_jar, opts = {}) @user_token = UserAuthToken.generate!( user_id: user.id, user_agent: @env['HTTP_USER_AGENT'], @@ -212,7 +263,7 @@ class Auth::DefaultCurrentUserProvider staff: user.staff?, impersonate: opts[:impersonate]) - cookies[TOKEN_COOKIE] = cookie_hash(@user_token.unhashed_auth_token) + set_auth_cookie!(@user_token.unhashed_auth_token, user, cookie_jar) user.unstage! 
make_developer_admin(user) enable_bootstrap_mode(user) @@ -222,22 +273,29 @@ class Auth::DefaultCurrentUserProvider @env[CURRENT_USER_KEY] = user end - def cookie_hash(unhashed_auth_token) - hash = { - value: unhashed_auth_token, - httponly: true, - secure: SiteSetting.force_https + def set_auth_cookie!(unhashed_auth_token, user, cookie_jar) + data = { + token: unhashed_auth_token, + user_id: user.id, + trust_level: user.trust_level, + issued_at: Time.zone.now.to_i } if SiteSetting.persistent_sessions - hash[:expires] = SiteSetting.maximum_session_age.hours.from_now + expires = SiteSetting.maximum_session_age.hours.from_now end if SiteSetting.same_site_cookies != "Disabled" - hash[:same_site] = SiteSetting.same_site_cookies + same_site = SiteSetting.same_site_cookies end - hash + cookie_jar.encrypted[TOKEN_COOKIE] = { + value: data, + httponly: true, + secure: SiteSetting.force_https, + expires: expires, + same_site: same_site + } end def make_developer_admin(user) @@ -258,7 +316,7 @@ class Auth::DefaultCurrentUserProvider end end - def log_off_user(session, cookies) + def log_off_user(session, cookie_jar) user = current_user if SiteSetting.log_out_strict && user @@ -266,7 +324,7 @@ class Auth::DefaultCurrentUserProvider if user.admin && defined?(Rack::MiniProfiler) # clear the profiling cookie to keep stuff tidy - cookies.delete("__profilin") + cookie_jar.delete("__profilin") end user.logged_out @@ -274,8 +332,8 @@ class Auth::DefaultCurrentUserProvider @user_token.destroy end - cookies.delete('authentication_data') - cookies.delete(TOKEN_COOKIE) + cookie_jar.delete('authentication_data') + cookie_jar.delete(TOKEN_COOKIE) end # api has special rights return true if api was detected @@ -290,14 +348,13 @@ class Auth::DefaultCurrentUserProvider end def has_auth_cookie? - cookie = @request.cookies[TOKEN_COOKIE] - !cookie.nil? && cookie.length == 32 + find_auth_token.present? end def should_update_last_seen? return false unless can_write? - api = !!(@env[API_KEY_ENV]) || !!(@env[USER_API_KEY_ENV]) + api = !!@env[API_KEY_ENV] || !!@env[USER_API_KEY_ENV] if @request.xhr? || api @env["HTTP_DISCOURSE_PRESENT"] == "true" @@ -308,31 +365,6 @@ class Auth::DefaultCurrentUserProvider protected - def lookup_user_api_user_and_update_key(user_api_key, client_id) - if api_key = UserApiKey.active.with_key(user_api_key).includes(:user, :scopes).first - unless api_key.allow?(@env) - raise Discourse::InvalidAccess - end - - if can_write? - api_key.update_columns(last_used_at: Time.zone.now) - - if client_id.present? && client_id != api_key.client_id - - # invalidate old dupe api key for client if needed - UserApiKey - .where(client_id: client_id, user_id: api_key.user_id) - .where('id <> ?', api_key.id) - .destroy_all - - api_key.update_columns(client_id: client_id) - end - end - - api_key.user - end - end - def lookup_api_user(api_key_value, request) if api_key = ApiKey.active.with_key(api_key_value).includes(:user).first api_username = header_api_key? ? @env[HEADER_API_USERNAME] : request[API_USERNAME] @@ -378,27 +410,61 @@ class Auth::DefaultCurrentUserProvider !!@env[HEADER_API_KEY] end - def rate_limit_admin_api_requests! - return if Rails.env == "profile" - - limit = GlobalSetting.max_admin_api_reqs_per_minute.to_i - if GlobalSetting.respond_to?(:max_admin_api_reqs_per_key_per_minute) - Discourse.deprecate("DISCOURSE_MAX_ADMIN_API_REQS_PER_KEY_PER_MINUTE is deprecated. 
Please use DISCOURSE_MAX_ADMIN_API_REQS_PER_MINUTE", drop_from: '2.9.0') - limit = [ GlobalSetting.max_admin_api_reqs_per_key_per_minute.to_i, limit].max - end - - global_limit = RateLimiter.new( - nil, - "admin_api_min", - limit, - 60 - ) - - global_limit.performed! - end - def can_write? @can_write ||= !Discourse.pg_readonly_mode? end + def admin_api_key_limiter + return @admin_api_key_limiter if @admin_api_key_limiter + + limit = GlobalSetting.max_admin_api_reqs_per_minute.to_i + if GlobalSetting.respond_to?(:max_admin_api_reqs_per_key_per_minute) + Discourse.deprecate("DISCOURSE_MAX_ADMIN_API_REQS_PER_KEY_PER_MINUTE is deprecated. Please use DISCOURSE_MAX_ADMIN_API_REQS_PER_MINUTE", drop_from: '2.9.0') + limit = [ + GlobalSetting.max_admin_api_reqs_per_key_per_minute.to_i, + limit + ].max + end + @admin_api_key_limiter = RateLimiter.new( + nil, + "admin_api_min", + limit, + 60, + error_code: "admin_api_key_rate_limit" + ) + end + + def user_api_key_limiter_60_secs + @user_api_key_limiter_60_secs ||= RateLimiter.new( + nil, + "user_api_min_#{@hashed_user_api_key}", + GlobalSetting.max_user_api_reqs_per_minute, + 60, + error_code: "user_api_key_limiter_60_secs" + ) + end + + def user_api_key_limiter_1_day + @user_api_key_limiter_1_day ||= RateLimiter.new( + nil, + "user_api_day_#{@hashed_user_api_key}", + GlobalSetting.max_user_api_reqs_per_day, + 86400, + error_code: "user_api_key_limiter_1_day" + ) + end + + def find_auth_token + return @auth_token if defined?(@auth_token) + + @auth_token = begin + if v0 = self.class.find_v0_auth_cookie(@request) + v0 + elsif v1 = self.class.find_v1_auth_cookie(@env) + if v1[:issued_at] >= SiteSetting.maximum_session_age.hours.ago.to_i + v1[:token] + end + end + end + end end diff --git a/lib/guardian.rb b/lib/guardian.rb index a4af71e2a5..a0ba295b5d 100644 --- a/lib/guardian.rb +++ b/lib/guardian.rb @@ -529,9 +529,16 @@ class Guardian end def auth_token - if cookie = request&.cookies[Auth::DefaultCurrentUserProvider::TOKEN_COOKIE] - UserAuthToken.hash_token(cookie) + return if !request + + token = Auth::DefaultCurrentUserProvider.find_v0_auth_cookie(request).presence + + if !token + cookie = Auth::DefaultCurrentUserProvider.find_v1_auth_cookie(request.env) + token = cookie[:token] if cookie end + + UserAuthToken.hash_token(token) if token end private diff --git a/lib/homepage_constraint.rb b/lib/homepage_constraint.rb index 7c04acebb2..ef6686d5fd 100644 --- a/lib/homepage_constraint.rb +++ b/lib/homepage_constraint.rb @@ -8,8 +8,8 @@ class HomePageConstraint def matches?(request) return @filter == 'finish_installation' if SiteSetting.has_login_hint? - provider = Discourse.current_user_provider.new(request.env) - homepage = provider&.current_user&.user_option&.homepage || SiteSetting.anonymous_homepage + current_user = CurrentUser.lookup_from_env(request.env) + homepage = current_user&.user_option&.homepage || SiteSetting.anonymous_homepage homepage == @filter rescue Discourse::InvalidAccess, Discourse::ReadOnly false diff --git a/lib/middleware/anonymous_cache.rb b/lib/middleware/anonymous_cache.rb index cb06d8793f..059b6d457f 100644 --- a/lib/middleware/anonymous_cache.rb +++ b/lib/middleware/anonymous_cache.rb @@ -49,9 +49,9 @@ module Middleware ACCEPT_ENCODING = "HTTP_ACCEPT_ENCODING" DISCOURSE_RENDER = "HTTP_DISCOURSE_RENDER" - def initialize(env) + def initialize(env, request = nil) @env = env - @request = Rack::Request.new(@env) + @request = request || Rack::Request.new(@env) end def blocked_crawler? 
diff --git a/lib/middleware/request_tracker.rb b/lib/middleware/request_tracker.rb index b5fe3216ca..fa035a0087 100644 --- a/lib/middleware/request_tracker.rb +++ b/lib/middleware/request_tracker.rb @@ -4,7 +4,6 @@ require 'method_profiler' require 'middleware/anonymous_cache' class Middleware::RequestTracker - @@detailed_request_loggers = nil @@ip_skipper = nil @@ -56,6 +55,10 @@ class Middleware::RequestTracker @@ip_skipper = blk end + def self.ip_skipper + @@ip_skipper + end + def initialize(app, settings = {}) @app = app end @@ -92,23 +95,25 @@ class Middleware::RequestTracker end end - def self.get_data(env, result, timing) + def self.get_data(env, result, timing, request = nil) status, headers = result status = status.to_i - helper = Middleware::AnonymousCache::Helper.new(env) - request = Rack::Request.new(env) + request ||= Rack::Request.new(env) + helper = Middleware::AnonymousCache::Helper.new(env, request) env_track_view = env["HTTP_DISCOURSE_TRACK_VIEW"] track_view = status == 200 track_view &&= env_track_view != "0" && env_track_view != "false" track_view &&= env_track_view || (request.get? && !request.xhr? && headers["Content-Type"] =~ /text\/html/) track_view = !!track_view + has_auth_cookie = Auth::DefaultCurrentUserProvider.find_v0_auth_cookie(request).present? + has_auth_cookie ||= Auth::DefaultCurrentUserProvider.find_v1_auth_cookie(env).present? h = { status: status, is_crawler: helper.is_crawler?, - has_auth_cookie: helper.has_auth_cookie?, + has_auth_cookie: has_auth_cookie, is_background: !!(request.path =~ /^\/message-bus\// || request.path =~ /\/topics\/timings/), is_mobile: helper.is_mobile?, track_view: track_view, @@ -132,9 +137,9 @@ class Middleware::RequestTracker h end - def log_request_info(env, result, info) + def log_request_info(env, result, info, request = nil) # we got to skip this on error ... its just logging - data = self.class.get_data(env, result, info) rescue nil + data = self.class.get_data(env, result, info, request) rescue nil if data if result && (headers = result[1]) @@ -165,7 +170,7 @@ class Middleware::RequestTracker def call(env) result = nil - log_request = true + info = nil # doing this as early as possible so we have an # accurate counter @@ -173,14 +178,20 @@ class Middleware::RequestTracker request = Rack::Request.new(env) - if available_in = rate_limit(request) - return [ - 429, - { "Retry-After" => available_in.to_s }, - ["Slow down, too many requests from this IP address"] - ] + cookie = find_auth_cookie(env) + if error_details = rate_limit(request, cookie) + available_in, error_code = error_details + message = <<~TEXT + Slow down, too many requests from this IP address. + Please retry again in #{available_in} seconds. + Error code: #{error_code}. + TEXT + headers = { + "Retry-After" => available_in.to_s, + "Discourse-Rate-Limit-Error-Code" => error_code + } + return [429, headers, [message]] end - env["discourse.request_tracker"] = self MethodProfiler.start @@ -222,93 +233,8 @@ class Middleware::RequestTracker end end end - log_request_info(env, result, info) unless !log_request || env["discourse.request_tracker.skip"] - end - - def is_private_ip?(ip) - ip = IPAddr.new(ip) rescue nil - !!(ip && (ip.private? 
|| ip.loopback?)) - end - - def rate_limit(request) - if ( - GlobalSetting.max_reqs_per_ip_mode == "block" || - GlobalSetting.max_reqs_per_ip_mode == "warn" || - GlobalSetting.max_reqs_per_ip_mode == "warn+block" - ) - - ip = request.ip - - if !GlobalSetting.max_reqs_rate_limit_on_private - return false if is_private_ip?(ip) - end - - return false if @@ip_skipper&.call(ip) - return false if STATIC_IP_SKIPPER&.any? { |entry| entry.include?(ip) } - - limiter10 = RateLimiter.new( - nil, - "global_ip_limit_10_#{ip}", - GlobalSetting.max_reqs_per_ip_per_10_seconds, - 10, - global: true, - aggressive: true - ) - - limiter60 = RateLimiter.new( - nil, - "global_ip_limit_60_#{ip}", - GlobalSetting.max_reqs_per_ip_per_minute, - 60, - global: true, - aggressive: true - ) - - limiter_assets10 = RateLimiter.new( - nil, - "global_ip_limit_10_assets_#{ip}", - GlobalSetting.max_asset_reqs_per_ip_per_10_seconds, - 10, - global: true - ) - - request.env['DISCOURSE_RATE_LIMITERS'] = [limiter10, limiter60] - request.env['DISCOURSE_ASSET_RATE_LIMITERS'] = [limiter_assets10] - - warn = GlobalSetting.max_reqs_per_ip_mode == "warn" || GlobalSetting.max_reqs_per_ip_mode == "warn+block" - - if !limiter_assets10.can_perform? - if warn - Discourse.warn("Global asset IP rate limit exceeded for #{ip}: 10 second rate limit", uri: request.env["REQUEST_URI"]) - end - - if GlobalSetting.max_reqs_per_ip_mode != "warn" - return limiter_assets10.seconds_to_wait(Time.now.to_i) - else - return false - end - end - - begin - type = 10 - limiter10.performed! - - type = 60 - limiter60.performed! - - false - rescue RateLimiter::LimitExceeded => e - if warn - Discourse.warn("Global IP rate limit exceeded for #{ip}: #{type} second rate limit", uri: request.env["REQUEST_URI"]) - if GlobalSetting.max_reqs_per_ip_mode != "warn" - e.available_in - else - false - end - else - e.available_in - end - end + if !env["discourse.request_tracker.skip"] + log_request_info(env, result, info, request) end end @@ -319,4 +245,108 @@ class Middleware::RequestTracker end end end + + def find_auth_cookie(env) + min_allowed_timestamp = Time.now.to_i - (UserAuthToken::ROTATE_TIME_MINS + 1) * 60 + cookie = Auth::DefaultCurrentUserProvider.find_v1_auth_cookie(env) + if cookie && cookie[:issued_at] >= min_allowed_timestamp + cookie + end + end + + def is_private_ip?(ip) + ip = IPAddr.new(ip) + !!(ip && (ip.private? || ip.loopback?)) + rescue IPAddr::AddressFamilyError, IPAddr::InvalidAddressError + false + end + + def rate_limit(request, cookie) + warn = GlobalSetting.max_reqs_per_ip_mode == "warn" || + GlobalSetting.max_reqs_per_ip_mode == "warn+block" + block = GlobalSetting.max_reqs_per_ip_mode == "block" || + GlobalSetting.max_reqs_per_ip_mode == "warn+block" + + return if !block && !warn + + ip = request.ip + + if !GlobalSetting.max_reqs_rate_limit_on_private + return if is_private_ip?(ip) + end + + return if @@ip_skipper&.call(ip) + return if STATIC_IP_SKIPPER&.any? { |entry| entry.include?(ip) } + + ip_or_id = ip + limit_on_id = false + if cookie && cookie[:user_id] && cookie[:trust_level] && cookie[:trust_level] >= GlobalSetting.skip_per_ip_rate_limit_trust_level + ip_or_id = cookie[:user_id] + limit_on_id = true + end + + limiter10 = RateLimiter.new( + nil, + "global_ip_limit_10_#{ip_or_id}", + GlobalSetting.max_reqs_per_ip_per_10_seconds, + 10, + global: !limit_on_id, + aggressive: true, + error_code: limit_on_id ? 
"id_10_secs_limit" : "ip_10_secs_limit" + ) + + limiter60 = RateLimiter.new( + nil, + "global_ip_limit_60_#{ip_or_id}", + GlobalSetting.max_reqs_per_ip_per_minute, + 60, + global: !limit_on_id, + error_code: limit_on_id ? "id_60_secs_limit" : "ip_60_secs_limit", + aggressive: true + ) + + limiter_assets10 = RateLimiter.new( + nil, + "global_ip_limit_10_assets_#{ip_or_id}", + GlobalSetting.max_asset_reqs_per_ip_per_10_seconds, + 10, + error_code: limit_on_id ? "id_assets_10_secs_limit" : "ip_assets_10_secs_limit", + global: !limit_on_id + ) + + request.env['DISCOURSE_RATE_LIMITERS'] = [limiter10, limiter60] + request.env['DISCOURSE_ASSET_RATE_LIMITERS'] = [limiter_assets10] + + if !limiter_assets10.can_perform? + if warn + Discourse.warn("Global asset IP rate limit exceeded for #{ip}: 10 second rate limit", uri: request.env["REQUEST_URI"]) + end + + if block + return [ + limiter_assets10.seconds_to_wait(Time.now.to_i), + limiter_assets10.error_code + ] + end + end + + begin + type = 10 + limiter10.performed! + + type = 60 + limiter60.performed! + + nil + rescue RateLimiter::LimitExceeded => e + if warn + Discourse.warn("Global IP rate limit exceeded for #{ip}: #{type} second rate limit", uri: request.env["REQUEST_URI"]) + end + if block + [e.available_in, e.error_code] + else + nil + end + end + end end diff --git a/lib/rate_limiter.rb b/lib/rate_limiter.rb index 12f565bf17..f7572d4b05 100644 --- a/lib/rate_limiter.rb +++ b/lib/rate_limiter.rb @@ -3,7 +3,7 @@ # A redis backed rate limiter. class RateLimiter - attr_reader :max, :secs, :user, :key + attr_reader :max, :secs, :user, :key, :error_code def self.key_prefix "l-rate-limit3:" @@ -37,7 +37,7 @@ class RateLimiter "#{RateLimiter.key_prefix}:#{@user && @user.id}:#{type}" end - def initialize(user, type, max, secs, global: false, aggressive: false) + def initialize(user, type, max, secs, global: false, aggressive: false, error_code: nil) @user = user @type = type @key = build_key(type) @@ -45,6 +45,7 @@ class RateLimiter @secs = secs @global = global @aggressive = aggressive + @error_code = error_code end def clear! @@ -55,7 +56,7 @@ class RateLimiter rate_unlimited? || is_under_limit? end - def seconds_to_wait(now) + def seconds_to_wait(now = Time.now.to_i) @secs - age_of_oldest(now) end @@ -116,7 +117,7 @@ class RateLimiter now = Time.now.to_i if ((max || 0) <= 0) || rate_limiter_allowed?(now) - raise RateLimiter::LimitExceeded.new(seconds_to_wait(now), @type) if raise_error + raise RateLimiter::LimitExceeded.new(seconds_to_wait(now), @type, @error_code) if raise_error false else true diff --git a/lib/rate_limiter/limit_exceeded.rb b/lib/rate_limiter/limit_exceeded.rb index f9f4c8a3aa..0109dbf9e5 100644 --- a/lib/rate_limiter/limit_exceeded.rb +++ b/lib/rate_limiter/limit_exceeded.rb @@ -16,11 +16,12 @@ class RateLimiter # A rate limit has been exceeded. class LimitExceeded < StandardError - attr_reader :type, :available_in + attr_reader :type, :available_in, :error_code - def initialize(available_in, type = nil) + def initialize(available_in, type = nil, error_code = nil) @available_in = available_in @type = type + @error_code = error_code end def time_left diff --git a/lib/staff_constraint.rb b/lib/staff_constraint.rb index a90e264da1..3f5e5a87c4 100644 --- a/lib/staff_constraint.rb +++ b/lib/staff_constraint.rb @@ -3,10 +3,8 @@ class StaffConstraint def matches?(request) - provider = Discourse.current_user_provider.new(request.env) - provider.current_user && - provider.current_user.staff? 
&& - custom_staff_check(request) + current_user = CurrentUser.lookup_from_env(request.env) + current_user&.staff? && custom_staff_check(request) rescue Discourse::InvalidAccess, Discourse::ReadOnly false end diff --git a/spec/components/auth/default_current_user_provider_spec.rb b/spec/components/auth/default_current_user_provider_spec.rb index 502e289f86..d684a0f540 100644 --- a/spec/components/auth/default_current_user_provider_spec.rb +++ b/spec/components/auth/default_current_user_provider_spec.rb @@ -13,16 +13,42 @@ describe Auth::DefaultCurrentUserProvider do def initialize(env) super(env) end + + def cookie_jar + @cookie_jar ||= ActionDispatch::Request.new(env).cookie_jar + end end def provider(url, opts = nil) opts ||= { method: "GET" } - env = Rack::MockRequest.env_for(url, opts) + env = create_request_env(path: url).merge(opts) TestProvider.new(env) end + def get_cookie_info(cookie_jar, name) + headers = {} + cookie_jar.always_write_cookie = true + cookie_jar.write(headers) + + header = headers["Set-Cookie"] + return if header.nil? + + info = {} + + line = header.split("\n").find { |l| l.start_with?("#{name}=") } + parts = line.split(";").map(&:strip) + + info[:value] = parts.shift.split("=")[1] + parts.each do |p| + key, value = p.split("=") + info[key.downcase.to_sym] = value || true + end + + info + end + it "can be used to pretend that a user doesn't exist" do - provider = TestProvider.new({}) + provider = TestProvider.new(create_request_env(path: "/")) expect(provider.current_user).to eq(nil) end @@ -234,11 +260,10 @@ describe Auth::DefaultCurrentUserProvider do end describe "#current_user" do - let(:unhashed_token) do + let(:cookie) do new_provider = provider('/') - cookies = {} - new_provider.log_on_user(user, {}, cookies) - cookies["_t"][:value] + new_provider.log_on_user(user, {}, new_provider.cookie_jar) + new_provider.cookie_jar["_t"] end before do @@ -251,7 +276,7 @@ describe Auth::DefaultCurrentUserProvider do end it "should not update last seen for suspended users" do - provider2 = provider("/", "HTTP_COOKIE" => "_t=#{unhashed_token}") + provider2 = provider("/", "HTTP_COOKIE" => "_t=#{cookie}") u = provider2.current_user u.reload expect(u.last_seen_at).to eq_time(Time.zone.now) @@ -264,7 +289,7 @@ describe Auth::DefaultCurrentUserProvider do u.clear_last_seen_cache! 
- provider2 = provider("/", "HTTP_COOKIE" => "_t=#{unhashed_token}") + provider2 = provider("/", "HTTP_COOKIE" => "_t=#{cookie}") expect(provider2.current_user).to eq(nil) u.reload @@ -281,7 +306,7 @@ describe Auth::DefaultCurrentUserProvider do end it "should not update User#last_seen_at" do - provider2 = provider("/", "HTTP_COOKIE" => "_t=#{unhashed_token}") + provider2 = provider("/", "HTTP_COOKIE" => "_t=#{cookie}") u = provider2.current_user u.reload expect(u.last_seen_at).to eq(nil) @@ -324,19 +349,26 @@ describe Auth::DefaultCurrentUserProvider do SiteSetting.persistent_sessions = false @provider = provider('/') - cookies = {} - @provider.log_on_user(user, {}, cookies) + @provider.log_on_user(user, {}, @provider.cookie_jar) - expect(cookies["_t"][:expires]).to eq(nil) + cookie_info = get_cookie_info(@provider.cookie_jar, "_t") + expect(cookie_info[:expires]).to eq(nil) + end + + it "v0 of auth cookie is still acceptable" do + token = UserAuthToken.generate!(user_id: user.id).unhashed_auth_token + ip = "10.0.0.1" + env = { "HTTP_COOKIE" => "_t=#{token}", "REMOTE_ADDR" => ip } + expect(provider('/', env).current_user.id).to eq(user.id) end it "correctly rotates tokens" do SiteSetting.maximum_session_age = 3 @provider = provider('/') - cookies = {} - @provider.log_on_user(user, {}, cookies) + @provider.log_on_user(user, {}, @provider.cookie_jar) - unhashed_token = cookies["_t"][:value] + cookie = @provider.cookie_jar["_t"] + unhashed_token = decrypt_auth_cookie(cookie)[:token] token = UserAuthToken.find_by(user_id: user.id) @@ -347,15 +379,19 @@ describe Auth::DefaultCurrentUserProvider do # at this point we are going to try to rotate token freeze_time 20.minutes.from_now - provider2 = provider("/", "HTTP_COOKIE" => "_t=#{unhashed_token}") + provider2 = provider("/", "HTTP_COOKIE" => "_t=#{cookie}") provider2.current_user token.reload expect(token.auth_token_seen).to eq(true) - cookies = {} - provider2.refresh_session(user, {}, cookies) - expect(cookies["_t"][:value]).not_to eq(unhashed_token) + provider2.refresh_session(user, {}, provider2.cookie_jar) + expect( + decrypt_auth_cookie(provider2.cookie_jar["_t"])[:token] + ).not_to eq(unhashed_token) + expect( + decrypt_auth_cookie(provider2.cookie_jar["_t"])[:token].size + ).to eq(32) token.reload expect(token.auth_token_seen).to eq(false) @@ -366,10 +402,10 @@ describe Auth::DefaultCurrentUserProvider do unverified_token = token.auth_token # old token should still work - provider2 = provider("/", "HTTP_COOKIE" => "_t=#{unhashed_token}") + provider2 = provider("/", "HTTP_COOKIE" => "_t=#{cookie}") expect(provider2.current_user.id).to eq(user.id) - provider2.refresh_session(user, {}, cookies) + provider2.refresh_session(user, {}, provider2.cookie_jar) token.reload @@ -394,23 +430,23 @@ describe Auth::DefaultCurrentUserProvider do it "fires event when updating last seen" do @provider = provider('/') - cookies = {} - @provider.log_on_user(user, {}, cookies) - unhashed_token = cookies["_t"][:value] + @provider.log_on_user(user, {}, @provider.cookie_jar) + cookie = @provider.cookie_jar["_t"] + unhashed_token = decrypt_auth_cookie(cookie)[:token] freeze_time 20.minutes.from_now - provider2 = provider("/", "HTTP_COOKIE" => "_t=#{unhashed_token}") - provider2.refresh_session(user, {}, {}) + provider2 = provider("/", "HTTP_COOKIE" => "_t=#{cookie}") + provider2.refresh_session(user, {}, provider2.cookie_jar) expect(@refreshes).to eq(1) end it "does not fire an event when last seen does not update" do @provider = provider('/') - cookies = {} - 
@provider.log_on_user(user, {}, cookies) - unhashed_token = cookies["_t"][:value] + @provider.log_on_user(user, {}, @provider.cookie_jar) + cookie = @provider.cookie_jar["_t"] + unhashed_token = decrypt_auth_cookie(cookie)[:token] freeze_time 2.minutes.from_now - provider2 = provider("/", "HTTP_COOKIE" => "_t=#{unhashed_token}") - provider2.refresh_session(user, {}, {}) + provider2 = provider("/", "HTTP_COOKIE" => "_t=#{cookie}") + provider2.refresh_session(user, {}, provider2.cookie_jar) expect(@refreshes).to eq(0) end end @@ -423,14 +459,28 @@ describe Auth::DefaultCurrentUserProvider do it "can only try 10 bad cookies a minute" do token = UserAuthToken.generate!(user_id: user.id) + cookie = create_auth_cookie( + token: token.unhashed_auth_token, + user_id: user.id, + trust_level: user.trust_level, + issued_at: 5.minutes.ago + ) - provider('/').log_on_user(user, {}, {}) + @provider = provider('/') + @provider.log_on_user(user, {}, @provider.cookie_jar) RateLimiter.new(nil, "cookie_auth_10.0.0.1", 10, 60).clear! RateLimiter.new(nil, "cookie_auth_10.0.0.2", 10, 60).clear! ip = "10.0.0.1" - env = { "HTTP_COOKIE" => "_t=#{SecureRandom.hex}", "REMOTE_ADDR" => ip } + bad_cookie = create_auth_cookie( + token: SecureRandom.hex, + user_id: user.id, + trust_level: user.trust_level, + issued_at: 5.minutes.ago, + ) + + env = { "HTTP_COOKIE" => "_t=#{bad_cookie}", "REMOTE_ADDR" => ip } 10.times do provider('/', env).current_user @@ -441,7 +491,7 @@ describe Auth::DefaultCurrentUserProvider do }.to raise_error(Discourse::InvalidAccess) expect { - env["HTTP_COOKIE"] = "_t=#{token.unhashed_auth_token}" + env["HTTP_COOKIE"] = "_t=#{cookie}" provider("/", env).current_user }.to raise_error(Discourse::InvalidAccess) @@ -454,14 +504,23 @@ describe Auth::DefaultCurrentUserProvider do end it "correctly removes invalid cookies" do - cookies = { "_t" => SecureRandom.hex } - provider('/').refresh_session(nil, {}, cookies) - expect(cookies.key?("_t")).to eq(false) + bad_cookie = create_auth_cookie( + token: SecureRandom.hex, + user_id: 1, + trust_level: 4, + issued_at: 5.minutes.ago, + ) + @provider = provider('/') + @provider.cookie_jar["_t"] = bad_cookie + @provider.refresh_session(nil, {}, @provider.cookie_jar) + expect(@provider.cookie_jar.key?("_t")).to eq(false) end it "logging on user always creates a new token" do - provider('/').log_on_user(user, {}, {}) - provider('/').log_on_user(user, {}, {}) + @provider = provider('/') + @provider.log_on_user(user, {}, @provider.cookie_jar) + @provider2 = provider('/') + @provider2.log_on_user(user, {}, @provider2.cookie_jar) expect(UserAuthToken.where(user_id: user.id).count).to eq(2) end @@ -484,7 +543,8 @@ describe Auth::DefaultCurrentUserProvider do expect(UserAuthToken.where(auth_token: (1..3).map { |i| "abc#{i}" }).count).to eq(3) # On next login, gets fixed - provider('/').log_on_user(user, {}, {}) + @provider = provider('/') + @provider.log_on_user(user, {}, @provider.cookie_jar) expect(UserAuthToken.where(user_id: user.id).count).to eq(UserAuthToken::MAX_SESSION_COUNT) # Oldest sessions are 1, 2, 3. 
They should now be deleted @@ -495,38 +555,48 @@ describe Auth::DefaultCurrentUserProvider do SiteSetting.force_https = false SiteSetting.same_site_cookies = "Lax" - cookies = {} - provider('/').log_on_user(user, {}, cookies) + @provider = provider('/') + @provider.log_on_user(user, {}, @provider.cookie_jar) - expect(cookies["_t"][:same_site]).to eq("Lax") - expect(cookies["_t"][:httponly]).to eq(true) - expect(cookies["_t"][:secure]).to eq(false) + cookie_info = get_cookie_info(@provider.cookie_jar, "_t") + expect(cookie_info[:samesite]).to eq("Lax") + expect(cookie_info[:httponly]).to eq(true) + expect(cookie_info.key?(:secure)).to eq(false) SiteSetting.force_https = true SiteSetting.same_site_cookies = "Disabled" - cookies = {} - provider('/').log_on_user(user, {}, cookies) + @provider = provider('/') + @provider.log_on_user(user, {}, @provider.cookie_jar) - expect(cookies["_t"][:secure]).to eq(true) - expect(cookies["_t"].key?(:same_site)).to eq(false) + cookie_info = get_cookie_info(@provider.cookie_jar, "_t") + expect(cookie_info[:secure]).to eq(true) + expect(cookie_info.key?(:same_site)).to eq(false) end it "correctly expires session" do SiteSetting.maximum_session_age = 2 token = UserAuthToken.generate!(user_id: user.id) + cookie = create_auth_cookie( + token: token.unhashed_auth_token, + user_id: user.id, + trust_level: user.trust_level, + issued_at: 5.minutes.ago + ) - provider('/').log_on_user(user, {}, {}) + @provider = provider('/') + @provider.log_on_user(user, {}, @provider.cookie_jar) - expect(provider("/", "HTTP_COOKIE" => "_t=#{token.unhashed_auth_token}").current_user.id).to eq(user.id) + expect(provider("/", "HTTP_COOKIE" => "_t=#{cookie}").current_user.id).to eq(user.id) freeze_time 3.hours.from_now - expect(provider("/", "HTTP_COOKIE" => "_t=#{token.unhashed_auth_token}").current_user).to eq(nil) + expect(provider("/", "HTTP_COOKIE" => "_t=#{cookie}").current_user).to eq(nil) end it "always unstage users" do user.update!(staged: true) - provider("/").log_on_user(user, {}, {}) + @provider = provider("/") + @provider.log_on_user(user, {}, @provider.cookie_jar) user.reload expect(user.staged).to eq(false) end @@ -658,4 +728,16 @@ describe Auth::DefaultCurrentUserProvider do end end end + + it "ignores a valid auth cookie that has been tampered with" do + @provider = provider('/') + @provider.log_on_user(user, {}, @provider.cookie_jar) + + cookie = @provider.cookie_jar["_t"] + cookie = swap_2_different_characters(cookie) + + ip = "10.0.0.1" + env = { "HTTP_COOKIE" => "_t=#{cookie}", "REMOTE_ADDR" => ip } + expect(provider('/', env).current_user).to eq(nil) + end end diff --git a/spec/components/current_user_spec.rb b/spec/components/current_user_spec.rb index 78310be245..e8ec24118c 100644 --- a/spec/components/current_user_spec.rb +++ b/spec/components/current_user_spec.rb @@ -7,7 +7,16 @@ describe CurrentUser do user = Fabricate(:user, active: true) token = UserAuthToken.generate!(user_id: user.id) - env = Rack::MockRequest.env_for("/test", "HTTP_COOKIE" => "_t=#{token.unhashed_auth_token};") + cookie = create_auth_cookie( + token: token.unhashed_auth_token, + user_id: user.id, + trust_level: user.trust_level, + issued_at: 5.minutes.ago, + ) + + env = create_request_env(path: "/test").merge( + "HTTP_COOKIE" => "_t=#{cookie};" + ) expect(CurrentUser.lookup_from_env(env)).to eq(user) end diff --git a/spec/components/guardian_spec.rb b/spec/components/guardian_spec.rb index 3ff0420bb2..86e45b0bfd 100644 --- a/spec/components/guardian_spec.rb +++ 
b/spec/components/guardian_spec.rb @@ -3846,9 +3846,24 @@ describe Guardian do describe '#auth_token' do it 'returns the correct auth token' do token = UserAuthToken.generate!(user_id: user.id) - env = Rack::MockRequest.env_for("/", "HTTP_COOKIE" => "_t=#{token.unhashed_auth_token};") + cookie = create_auth_cookie( + token: token.unhashed_auth_token, + user_id: user.id, + trust_level: user.trust_level, + issued_at: 5.minutes.ago, + ) + env = create_request_env(path: "/").merge("HTTP_COOKIE" => "_t=#{cookie};") - guardian = Guardian.new(user, Rack::Request.new(env)) + guardian = Guardian.new(user, ActionDispatch::Request.new(env)) + expect(guardian.auth_token).to eq(token.auth_token) + end + + it 'supports v0 of auth cookie' do + token = UserAuthToken.generate!(user_id: user.id) + cookie = token.unhashed_auth_token + env = create_request_env(path: "/").merge("HTTP_COOKIE" => "_t=#{cookie};") + + guardian = Guardian.new(user, ActionDispatch::Request.new(env)) expect(guardian.auth_token).to eq(token.auth_token) end end diff --git a/spec/components/hijack_spec.rb b/spec/components/hijack_spec.rb index 416ee4102b..b75e522acb 100644 --- a/spec/components/hijack_spec.rb +++ b/spec/components/hijack_spec.rb @@ -58,7 +58,7 @@ describe Hijack do end end - env = {} + env = create_request_env(path: "/") middleware = Middleware::RequestTracker.new(app) middleware.call(env) diff --git a/spec/components/middleware/anonymous_cache_spec.rb b/spec/components/middleware/anonymous_cache_spec.rb index 9cf0e05866..0d7868401c 100644 --- a/spec/components/middleware/anonymous_cache_spec.rb +++ b/spec/components/middleware/anonymous_cache_spec.rb @@ -6,7 +6,7 @@ describe Middleware::AnonymousCache do let(:middleware) { Middleware::AnonymousCache.new(lambda { |_| [200, {}, []] }) } def env(opts = {}) - Rack::MockRequest.env_for("http://test.com/path?bla=1").merge(opts) + create_request_env(path: "http://test.com/path?bla=1").merge(opts) end describe Middleware::AnonymousCache::Helper do @@ -23,8 +23,15 @@ describe Middleware::AnonymousCache do expect(new_helper("ANON_CACHE_DURATION" => 10, "REQUEST_METHOD" => "POST").cacheable?).to eq(false) end - it "is false if it has an auth cookie" do - expect(new_helper("HTTP_COOKIE" => "jack=1; _t=#{"1" * 32}; jill=2").cacheable?).to eq(false) + it "is false if it has a valid auth cookie" do + cookie = create_auth_cookie(token: SecureRandom.hex) + expect(new_helper("HTTP_COOKIE" => "jack=1; _t=#{cookie}; jill=2").cacheable?).to eq(false) + end + + it "is true if it has an invalid auth cookie" do + cookie = create_auth_cookie(token: SecureRandom.hex, issued_at: 5.minutes.ago) + cookie = swap_2_different_characters(cookie) + expect(new_helper("HTTP_COOKIE" => "jack=1; _t=#{cookie}; jill=2").cacheable?).to eq(true) end it "is false for srv/status routes" do @@ -142,14 +149,15 @@ describe Middleware::AnonymousCache do global_setting :background_requests_max_queue_length, "0.5" - env = { - "HTTP_COOKIE" => "_t=#{SecureRandom.hex}", + cookie = create_auth_cookie(token: SecureRandom.hex) + env = create_request_env.merge( + "HTTP_COOKIE" => "_t=#{cookie}", "HOST" => "site.com", "REQUEST_METHOD" => "GET", "REQUEST_URI" => "/somewhere/rainbow", "REQUEST_QUEUE_SECONDS" => 2.1, "rack.input" => StringIO.new - } + ) # non background ... 
long request env["REQUEST_QUEUE_SECONDS"] = 2 @@ -194,15 +202,16 @@ describe Middleware::AnonymousCache do global_setting :force_anonymous_min_per_10_seconds, 2 global_setting :force_anonymous_min_queue_seconds, 1 - env = { - "HTTP_COOKIE" => "_t=#{SecureRandom.hex}", + cookie = create_auth_cookie(token: SecureRandom.hex) + env = create_request_env.merge( + "HTTP_COOKIE" => "_t=#{cookie}", "HTTP_DISCOURSE_LOGGED_IN" => "true", "HOST" => "site.com", "REQUEST_METHOD" => "GET", "REQUEST_URI" => "/somewhere/rainbow", "REQUEST_QUEUE_SECONDS" => 2.1, "rack.input" => StringIO.new - } + ) is_anon = false app.call(env.dup) diff --git a/spec/components/middleware/request_tracker_spec.rb b/spec/components/middleware/request_tracker_spec.rb index 63d9411b1f..4cdab112c7 100644 --- a/spec/components/middleware/request_tracker_spec.rb +++ b/spec/components/middleware/request_tracker_spec.rb @@ -3,16 +3,15 @@ require "rails_helper" describe Middleware::RequestTracker do - def env(opts = {}) - { + create_request_env.merge( "HTTP_HOST" => "http://test.com", "HTTP_USER_AGENT" => "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36", "REQUEST_URI" => "/path?bla=1", "REQUEST_METHOD" => "GET", "HTTP_ACCEPT" => "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8", - "rack.input" => "" - }.merge(opts) + "rack.input" => StringIO.new + ).merge(opts) end before do @@ -140,9 +139,15 @@ describe Middleware::RequestTracker do let(:logged_in_data) do user = Fabricate(:user, active: true) token = UserAuthToken.generate!(user_id: user.id) + cookie = create_auth_cookie( + token: token.unhashed_auth_token, + user_id: user.id, + trust_level: user.trust_level, + issued_at: 5.minutes.ago + ) Middleware::RequestTracker.get_data(env( "HTTP_USER_AGENT" => "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.72 Safari/537.36", - "HTTP_COOKIE" => "_t=#{token.unhashed_auth_token};" + "HTTP_COOKIE" => "_t=#{cookie};" ), ["200", { "Content-Type" => 'text/html' }], 0.1) end @@ -195,6 +200,7 @@ describe Middleware::RequestTracker do before do RateLimiter.enable RateLimiter.clear_all_global! + RateLimiter.clear_all! 
@old_logger = Rails.logger Rails.logger = TestLogger.new @@ -386,6 +392,177 @@ describe Middleware::RequestTracker do status, _ = middleware.call(env2) expect(status).to eq(200) end + + describe "diagnostic information" do + it "is included when the requests-per-10-seconds limit is reached" do + global_setting :max_reqs_per_ip_per_10_seconds, 1 + called = 0 + app = lambda do |_| + called += 1 + [200, {}, ["OK"]] + end + env = env("REMOTE_ADDR" => "1.1.1.1") + middleware = Middleware::RequestTracker.new(app) + status, = middleware.call(env) + expect(status).to eq(200) + expect(called).to eq(1) + + env = env("REMOTE_ADDR" => "1.1.1.1") + middleware = Middleware::RequestTracker.new(app) + status, headers, response = middleware.call(env) + expect(status).to eq(429) + expect(called).to eq(1) + expect(headers["Discourse-Rate-Limit-Error-Code"]).to eq("ip_10_secs_limit") + expect(response.first).to include("Error code: ip_10_secs_limit.") + end + + it "is included when the requests-per-minute limit is reached" do + global_setting :max_reqs_per_ip_per_minute, 1 + called = 0 + app = lambda do |_| + called += 1 + [200, {}, ["OK"]] + end + env = env("REMOTE_ADDR" => "1.1.1.1") + middleware = Middleware::RequestTracker.new(app) + status, = middleware.call(env) + expect(status).to eq(200) + expect(called).to eq(1) + + env = env("REMOTE_ADDR" => "1.1.1.1") + middleware = Middleware::RequestTracker.new(app) + status, headers, response = middleware.call(env) + expect(status).to eq(429) + expect(called).to eq(1) + expect(headers["Discourse-Rate-Limit-Error-Code"]).to eq("ip_60_secs_limit") + expect(response.first).to include("Error code: ip_60_secs_limit.") + end + + it "is included when the assets-requests-per-10-seconds limit is reached" do + global_setting :max_asset_reqs_per_ip_per_10_seconds, 1 + called = 0 + app = lambda do |env| + called += 1 + env["DISCOURSE_IS_ASSET_PATH"] = true + [200, {}, ["OK"]] + end + env = env("REMOTE_ADDR" => "1.1.1.1") + middleware = Middleware::RequestTracker.new(app) + status, = middleware.call(env) + expect(status).to eq(200) + expect(called).to eq(1) + + env = env("REMOTE_ADDR" => "1.1.1.1") + middleware = Middleware::RequestTracker.new(app) + status, headers, response = middleware.call(env) + expect(status).to eq(429) + expect(called).to eq(1) + expect(headers["Discourse-Rate-Limit-Error-Code"]).to eq("ip_assets_10_secs_limit") + expect(response.first).to include("Error code: ip_assets_10_secs_limit.") + end + end + + it "users with high enough trust level are not rate limited per ip" do + global_setting :max_reqs_per_ip_per_minute, 1 + global_setting :skip_per_ip_rate_limit_trust_level, 3 + + envs = 3.times.map do |n| + user = Fabricate(:user, trust_level: 3) + token = UserAuthToken.generate!(user_id: user.id) + cookie = create_auth_cookie( + token: token.unhashed_auth_token, + user_id: user.id, + trust_level: user.trust_level, + issued_at: 5.minutes.ago + ) + env("HTTP_COOKIE" => "_t=#{cookie}", "REMOTE_ADDR" => "1.1.1.1") + end + + called = 0 + app = lambda do |env| + called += 1 + [200, {}, ["OK"]] + end + envs.each do |env| + middleware = Middleware::RequestTracker.new(app) + status, = middleware.call(env) + expect(status).to eq(200) + end + expect(called).to eq(3) + + envs.each do |env| + middleware = Middleware::RequestTracker.new(app) + status, headers, response = middleware.call(env) + expect(status).to eq(429) + expect(headers["Discourse-Rate-Limit-Error-Code"]).to eq("id_60_secs_limit") + expect(response.first).to include("Error code: id_60_secs_limit.") + 
end + expect(called).to eq(3) + end + + it "falls back to IP rate limiting if the cookie is too old" do + unfreeze_time + global_setting :max_reqs_per_ip_per_minute, 1 + global_setting :skip_per_ip_rate_limit_trust_level, 3 + user = Fabricate(:user, trust_level: 3) + token = UserAuthToken.generate!(user_id: user.id) + cookie = create_auth_cookie( + token: token.unhashed_auth_token, + user_id: user.id, + trust_level: user.trust_level, + issued_at: 5.minutes.ago + ) + env = env("HTTP_COOKIE" => "_t=#{cookie}", "REMOTE_ADDR" => "1.1.1.1") + + called = 0 + app = lambda do |_| + called += 1 + [200, {}, ["OK"]] + end + freeze_time(12.minutes.from_now) do + middleware = Middleware::RequestTracker.new(app) + status, = middleware.call(env) + expect(status).to eq(200) + + middleware = Middleware::RequestTracker.new(app) + status, headers, response = middleware.call(env) + expect(status).to eq(429) + expect(headers["Discourse-Rate-Limit-Error-Code"]).to eq("ip_60_secs_limit") + expect(response.first).to include("Error code: ip_60_secs_limit.") + end + end + + it "falls back to IP rate limiting if the cookie is tampered with" do + unfreeze_time + global_setting :max_reqs_per_ip_per_minute, 1 + global_setting :skip_per_ip_rate_limit_trust_level, 3 + user = Fabricate(:user, trust_level: 3) + token = UserAuthToken.generate!(user_id: user.id) + cookie = create_auth_cookie( + token: token.unhashed_auth_token, + user_id: user.id, + trust_level: user.trust_level, + issued_at: Time.zone.now + ) + cookie = swap_2_different_characters(cookie) + env = env("HTTP_COOKIE" => "_t=#{cookie}", "REMOTE_ADDR" => "1.1.1.1") + + called = 0 + app = lambda do |_| + called += 1 + [200, {}, ["OK"]] + end + + middleware = Middleware::RequestTracker.new(app) + status, = middleware.call(env) + expect(status).to eq(200) + + middleware = Middleware::RequestTracker.new(app) + status, headers, response = middleware.call(env) + expect(status).to eq(429) + expect(headers["Discourse-Rate-Limit-Error-Code"]).to eq("ip_60_secs_limit") + expect(response.first).to include("Error code: ip_60_secs_limit.") + end end context "callbacks" do @@ -480,5 +657,4 @@ describe Middleware::RequestTracker do expect(headers["X-Runtime"].to_f).to be > 0 end end - end diff --git a/spec/jobs/export_user_archive_spec.rb b/spec/jobs/export_user_archive_spec.rb index 8f083f172c..aab8af6a7f 100644 --- a/spec/jobs/export_user_archive_spec.rb +++ b/spec/jobs/export_user_archive_spec.rb @@ -42,10 +42,12 @@ describe Jobs::ExportUserArchive do user.user_profile.website = 'https://doe.example.com/john' user.user_profile.save # force a UserAuthTokenLog entry - Discourse.current_user_provider.new({ + env = create_request_env.merge( 'HTTP_USER_AGENT' => 'MyWebBrowser', 'REQUEST_PATH' => '/some_path/456852', - }).log_on_user(user, {}, {}) + ) + cookie_jar = ActionDispatch::Request.new(env).cookie_jar + Discourse.current_user_provider.new(env).log_on_user(user, {}, cookie_jar) # force a nonstandard post action PostAction.new(user: user, post: post, post_action_type_id: 5).save @@ -198,10 +200,12 @@ describe Jobs::ExportUserArchive do let(:component) { 'auth_tokens' } before do - Discourse.current_user_provider.new({ + env = create_request_env.merge( 'HTTP_USER_AGENT' => 'MyWebBrowser', 'REQUEST_PATH' => '/some_path/456852', - }).log_on_user(user, {}, {}) + ) + cookie_jar = ActionDispatch::Request.new(env).cookie_jar + Discourse.current_user_provider.new(env).log_on_user(user, {}, cookie_jar) end it 'properly includes session records' do diff --git 
a/spec/multisite/request_tracker_spec.rb b/spec/multisite/request_tracker_spec.rb new file mode 100644 index 0000000000..3d3a7c4ef8 --- /dev/null +++ b/spec/multisite/request_tracker_spec.rb @@ -0,0 +1,142 @@ +# frozen_string_literal: true + +require "rails_helper" + +describe "RequestTracker in multisite", type: :multisite do + before do + global_setting :skip_per_ip_rate_limit_trust_level, 2 + + RateLimiter.enable + + test_multisite_connection("default") do + RateLimiter.clear_all! + end + test_multisite_connection("second") do + RateLimiter.clear_all! + end + RateLimiter.clear_all_global! + end + + def call(env, &block) + Middleware::RequestTracker.new(block).call(env) + end + + def create_env(opts) + create_request_env.merge(opts) + end + + shared_examples "ip rate limiters behavior" do |error_code, app_callback| + it "applies rate limits on an IP address across all sites" do + called = { default: 0, second: 0 } + test_multisite_connection("default") do + env = create_env("REMOTE_ADDR" => "123.10.71.4") + status, = call(env) do + called[:default] += 1 + app_callback&.call(env) + [200, {}, ["OK"]] + end + expect(status).to eq(200) + + env = create_env("REMOTE_ADDR" => "123.10.71.4") + status, headers = call(env) do + called[:default] += 1 + app_callback&.call(env) + [200, {}, ["OK"]] + end + expect(status).to eq(429) + expect(headers["Discourse-Rate-Limit-Error-Code"]).to eq(error_code) + expect(called[:default]).to eq(1) + end + + test_multisite_connection("second") do + env = create_env("REMOTE_ADDR" => "123.10.71.4") + status, headers = call(env) do + called[:second] += 1 + app_callback&.call(env) + [200, {}, ["OK"]] + end + expect(status).to eq(429) + expect(headers["Discourse-Rate-Limit-Error-Code"]).to eq(error_code) + expect(called[:second]).to eq(0) + end + end + end + + shared_examples "user id rate limiters behavior" do |error_code, app_callback| + it "does not leak rate limits for a user id to other sites" do + cookie = create_auth_cookie( + token: SecureRandom.hex, + user_id: 1, + trust_level: 2 + ) + called = { default: 0, second: 0 } + test_multisite_connection("default") do + env = create_env("REMOTE_ADDR" => "123.10.71.4", "HTTP_COOKIE" => "_t=#{cookie}") + status, = call(env) do + called[:default] += 1 + app_callback&.call(env) + [200, {}, ["OK"]] + end + expect(status).to eq(200) + + env = create_env("REMOTE_ADDR" => "123.10.71.4", "HTTP_COOKIE" => "_t=#{cookie}") + status, headers, = call(env) do + called[:default] += 1 + app_callback&.call(env) + [200, {}, ["OK"]] + end + expect(status).to eq(429) + expect(headers["Discourse-Rate-Limit-Error-Code"]).to eq(error_code) + expect(called[:default]).to eq(1) + end + + test_multisite_connection("second") do + env = create_env("REMOTE_ADDR" => "123.10.71.4", "HTTP_COOKIE" => "_t=#{cookie}") + status, = call(env) do + called[:second] += 1 + app_callback&.call(env) + [200, {}, ["OK"]] + end + expect(status).to eq(200) + + env = create_env("REMOTE_ADDR" => "123.10.71.4", "HTTP_COOKIE" => "_t=#{cookie}") + status, headers, = call(env) do + called[:second] += 1 + app_callback&.call(env) + [200, {}, ["OK"]] + end + expect(status).to eq(429) + expect(headers["Discourse-Rate-Limit-Error-Code"]).to eq(error_code) + expect(called[:second]).to eq(1) + end + end + end + + context "10 seconds limiter" do + before do + global_setting :max_reqs_per_ip_per_10_seconds, 1 + end + + include_examples "ip rate limiters behavior", "ip_10_secs_limit" + include_examples "user id rate limiters behavior", "id_10_secs_limit" + end + + context "60 
seconds limiter" do + before do + global_setting :max_reqs_per_ip_per_minute, 1 + end + + include_examples "ip rate limiters behavior", "ip_60_secs_limit" + include_examples "user id rate limiters behavior", "id_60_secs_limit" + end + + context "assets 10 seconds limiter" do + before do + global_setting :max_asset_reqs_per_ip_per_10_seconds, 1 + end + + app_callback = ->(env) { env["DISCOURSE_IS_ASSET_PATH"] = true } + include_examples "ip rate limiters behavior", "ip_assets_10_secs_limit", app_callback + include_examples "user id rate limiters behavior", "id_assets_10_secs_limit", app_callback + end +end diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb index 18e32104f2..3c7b68d79e 100644 --- a/spec/rails_helper.rb +++ b/spec/rails_helper.rb @@ -459,6 +459,44 @@ ensure Rails.logger = old_logger end +# this takes a string and returns a copy where 2 different +# characters are swapped. +# e.g. +# swap_2_different_characters("abc") => "bac" +# swap_2_different_characters("aac") => "caa" +def swap_2_different_characters(str) + swap1 = 0 + swap2 = str.split("").find_index { |c| c != str[swap1] } + # if the string is made up of 1 character + return str if !swap2 + str = str.dup + str[swap1], str[swap2] = str[swap2], str[swap1] + str +end + +def create_request_env(path: nil) + env = Rails.application.env_config.dup + env.merge!(Rack::MockRequest.env_for(path)) if path + env +end + +def create_auth_cookie(token:, user_id: nil, trust_level: nil, issued_at: Time.zone.now) + request = ActionDispatch::Request.new(create_request_env) + data = { + token: token, + user_id: user_id, + trust_level: trust_level, + issued_at: issued_at.to_i + } + cookie = request.cookie_jar.encrypted["_t"] = { value: data } + cookie[:value] +end + +def decrypt_auth_cookie(cookie) + request = ActionDispatch::Request.new(create_request_env.merge("HTTP_COOKIE" => "_t=#{cookie}")) + request.cookie_jar.encrypted["_t"] +end + class SpecSecureRandom class << self attr_accessor :value diff --git a/spec/requests/application_controller_spec.rb b/spec/requests/application_controller_spec.rb index 610cebccbd..603d2f3fa0 100644 --- a/spec/requests/application_controller_spec.rb +++ b/spec/requests/application_controller_spec.rb @@ -851,4 +851,67 @@ RSpec.describe ApplicationController do expect(response.headers["Vary"]).to eq(nil) end end + + describe "Discourse-Rate-Limit-Error-Code header" do + fab!(:admin) { Fabricate(:admin) } + + before do + RateLimiter.clear_all! + RateLimiter.enable + end + + it "is included when API key is rate limited" do + global_setting :max_admin_api_reqs_per_minute, 1 + api_key = ApiKey.create!(user_id: admin.id).key + get "/latest.json", headers: { + "Api-Key": api_key, + "Api-Username": admin.username + } + expect(response.status).to eq(200) + + get "/latest.json", headers: { + "Api-Key": api_key, + "Api-Username": admin.username + } + expect(response.status).to eq(429) + expect(response.headers["Discourse-Rate-Limit-Error-Code"]).to eq("admin_api_key_rate_limit") + end + + it "is included when user API key is rate limited" do + global_setting :max_user_api_reqs_per_minute, 1 + user_api_key = UserApiKey.create!( + user_id: admin.id, + client_id: "", + application_name: "discourseapp" + ) + user_api_key.scopes = UserApiKeyScope.all_scopes.keys.map do |name| + UserApiKeyScope.create!(name: name, user_api_key_id: user_api_key.id) + end + user_api_key.save! 
+ + get "/session/current.json", headers: { + "User-Api-Key": user_api_key.key, + } + expect(response.status).to eq(200) + + get "/session/current.json", headers: { + "User-Api-Key": user_api_key.key, + } + expect(response.status).to eq(429) + expect( + response.headers["Discourse-Rate-Limit-Error-Code"] + ).to eq("user_api_key_limiter_60_secs") + + global_setting :max_user_api_reqs_per_minute, 100 + global_setting :max_user_api_reqs_per_day, 1 + + get "/session/current.json", headers: { + "User-Api-Key": user_api_key.key, + } + expect(response.status).to eq(429) + expect( + response.headers["Discourse-Rate-Limit-Error-Code"] + ).to eq("user_api_key_limiter_1_day") + end + end end diff --git a/spec/requests/session_controller_spec.rb b/spec/requests/session_controller_spec.rb index 18042a9efa..c4684a182e 100644 --- a/spec/requests/session_controller_spec.rb +++ b/spec/requests/session_controller_spec.rb @@ -1454,7 +1454,8 @@ RSpec.describe SessionController do expect(session[:current_user_id]).to eq(user.id) expect(user.user_auth_tokens.count).to eq(1) - expect(UserAuthToken.hash_token(cookies[:_t])).to eq(user.user_auth_tokens.first.auth_token) + unhashed_token = decrypt_auth_cookie(cookies[:_t])[:token] + expect(UserAuthToken.hash_token(unhashed_token)).to eq(user.user_auth_tokens.first.auth_token) end context "when timezone param is provided" do @@ -1640,7 +1641,8 @@ RSpec.describe SessionController do expect(session[:current_user_id]).to eq(user.id) expect(user.user_auth_tokens.count).to eq(1) - expect(UserAuthToken.hash_token(cookies[:_t])) + unhashed_token = decrypt_auth_cookie(cookies[:_t])[:token] + expect(UserAuthToken.hash_token(unhashed_token)) .to eq(user.user_auth_tokens.first.auth_token) end end @@ -1658,7 +1660,8 @@ RSpec.describe SessionController do expect(session[:current_user_id]).to eq(user.id) expect(user.user_auth_tokens.count).to eq(1) - expect(UserAuthToken.hash_token(cookies[:_t])) + unhashed_token = decrypt_auth_cookie(cookies[:_t])[:token] + expect(UserAuthToken.hash_token(unhashed_token)) .to eq(user.user_auth_tokens.first.auth_token) end end diff --git a/spec/requests/users_controller_spec.rb b/spec/requests/users_controller_spec.rb index 5a4e860644..e436e9f2b1 100644 --- a/spec/requests/users_controller_spec.rb +++ b/spec/requests/users_controller_spec.rb @@ -4857,11 +4857,18 @@ describe UsersController do it 'does not let user log out of current session' do token = UserAuthToken.generate!(user_id: user.id) - env = Rack::MockRequest.env_for("/", "HTTP_COOKIE" => "_t=#{token.unhashed_auth_token};") - Guardian.any_instance.stubs(:request).returns(Rack::Request.new(env)) + cookie = create_auth_cookie( + token: token.unhashed_auth_token, + user_id: user.id, + trust_level: user.trust_level, + issued_at: 5.minutes.ago, + ) - post "/u/#{user.username}/preferences/revoke-auth-token.json", params: { token_id: token.id } + post "/u/#{user.username}/preferences/revoke-auth-token.json", + params: { token_id: token.id }, + headers: { "HTTP_COOKIE" => "_t=#{cookie}" } + expect(token.reload.id).to be_present expect(response.status).to eq(400) end diff --git a/spec/support/helpers.rb b/spec/support/helpers.rb index 417dcdca98..f0f02932c0 100644 --- a/spec/support/helpers.rb +++ b/spec/support/helpers.rb @@ -14,8 +14,9 @@ module Helpers end def log_in_user(user) + cookie_jar = ActionDispatch::Request.new(request.env).cookie_jar provider = Discourse.current_user_provider.new(request.env) - provider.log_on_user(user, session, cookies) + provider.log_on_user(user, session, 
cookie_jar) provider end From b96c10a90364dade1729d32ed5b771bc134d9e9f Mon Sep 17 00:00:00 2001 From: Martin Brennan Date: Thu, 18 Nov 2021 09:17:23 +1000 Subject: [PATCH 005/168] DEV: Extract shared external upload routes into controller helper (#14984) This commit refactors the direct external upload routes (get presigned put, complete external, create/abort/complete multipart) into a helper which is then included in both BackupController and the UploadController. This is done so UploadController doesn't need strange backup logic added to it, and so each controller implementing this helper can do their own validation/error handling nicely. This is a follow up to e4350bb96648622b73414712588ffc015e193562 --- .../app/components/uppy-backup-uploader.js | 1 + .../app/mixins/composer-upload-uppy.js | 1 + .../discourse/app/mixins/uppy-s3-multipart.js | 24 +- .../discourse/app/mixins/uppy-upload.js | 5 +- app/controllers/admin/backups_controller.rb | 22 ++ app/controllers/uploads_controller.rb | 366 ++---------------- config/routes.rb | 6 + lib/external_upload_helpers.rb | 347 +++++++++++++++++ .../requests/admin/backups_controller_spec.rb | 116 ++++++ spec/requests/uploads_controller_spec.rb | 92 +---- 10 files changed, 541 insertions(+), 439 deletions(-) create mode 100644 lib/external_upload_helpers.rb diff --git a/app/assets/javascripts/discourse/app/components/uppy-backup-uploader.js b/app/assets/javascripts/discourse/app/components/uppy-backup-uploader.js index 5891106f90..55be0926b7 100644 --- a/app/assets/javascripts/discourse/app/components/uppy-backup-uploader.js +++ b/app/assets/javascripts/discourse/app/components/uppy-backup-uploader.js @@ -7,6 +7,7 @@ export default Component.extend(UppyUploadMixin, { tagName: "span", type: "backup", useMultipartUploadsIfAvailable: true, + uploadRootPath: "/admin/backups", @discourseComputed("uploading", "uploadProgress") uploadButtonText(uploading, progress) { diff --git a/app/assets/javascripts/discourse/app/mixins/composer-upload-uppy.js b/app/assets/javascripts/discourse/app/mixins/composer-upload-uppy.js index 5146583d40..b31587a702 100644 --- a/app/assets/javascripts/discourse/app/mixins/composer-upload-uppy.js +++ b/app/assets/javascripts/discourse/app/mixins/composer-upload-uppy.js @@ -33,6 +33,7 @@ import bootbox from "bootbox"; // functionality and event binding. 
// export default Mixin.create(ExtendableUploader, UppyS3Multipart, { + uploadRootPath: "/uploads", uploadTargetBound: false, @observes("composerModel.uploadCancelled") diff --git a/app/assets/javascripts/discourse/app/mixins/uppy-s3-multipart.js b/app/assets/javascripts/discourse/app/mixins/uppy-s3-multipart.js index 6253234297..2c4d72bedd 100644 --- a/app/assets/javascripts/discourse/app/mixins/uppy-s3-multipart.js +++ b/app/assets/javascripts/discourse/app/mixins/uppy-s3-multipart.js @@ -1,4 +1,5 @@ import Mixin from "@ember/object/mixin"; +import getUrl from "discourse-common/lib/get-url"; import { bind } from "discourse-common/utils/decorators"; import { Promise } from "rsvp"; import { ajax } from "discourse/lib/ajax"; @@ -50,7 +51,7 @@ export default Mixin.create({ data.metadata = { "sha1-checksum": file.meta.sha1_checksum }; } - return ajax("/uploads/create-multipart.json", { + return ajax(getUrl(`${this.uploadRootPath}/create-multipart.json`), { type: "POST", data, // uppy is inconsistent, an error here fires the upload-error event @@ -70,13 +71,16 @@ export default Mixin.create({ if (file.preparePartsRetryAttempts === undefined) { file.preparePartsRetryAttempts = 0; } - return ajax("/uploads/batch-presign-multipart-parts.json", { - type: "POST", - data: { - part_numbers: partData.partNumbers, - unique_identifier: file.meta.unique_identifier, - }, - }) + return ajax( + getUrl(`${this.uploadRootPath}/batch-presign-multipart-parts.json`), + { + type: "POST", + data: { + part_numbers: partData.partNumbers, + unique_identifier: file.meta.unique_identifier, + }, + } + ) .then((data) => { if (file.preparePartsRetryAttempts) { delete file.preparePartsRetryAttempts; @@ -122,7 +126,7 @@ export default Mixin.create({ const parts = data.parts.map((part) => { return { part_number: part.PartNumber, etag: part.ETag }; }); - return ajax("/uploads/complete-multipart.json", { + return ajax(getUrl(`${this.uploadRootPath}/complete-multipart.json`), { type: "POST", contentType: "application/json", data: JSON.stringify({ @@ -155,7 +159,7 @@ export default Mixin.create({ return; } - return ajax("/uploads/abort-multipart.json", { + return ajax(getUrl(`${this.uploadRootPath}/abort-multipart.json`), { type: "POST", data: { external_upload_identifier: uploadId, diff --git a/app/assets/javascripts/discourse/app/mixins/uppy-upload.js b/app/assets/javascripts/discourse/app/mixins/uppy-upload.js index cabcfc2ae4..f94ca0b40e 100644 --- a/app/assets/javascripts/discourse/app/mixins/uppy-upload.js +++ b/app/assets/javascripts/discourse/app/mixins/uppy-upload.js @@ -27,6 +27,7 @@ export default Mixin.create(UppyS3Multipart, { autoStartUploads: true, _inProgressUploads: 0, id: null, + uploadRootPath: "/uploads", uploadDone() { warn("You should implement `uploadDone`", { @@ -223,7 +224,7 @@ export default Mixin.create(UppyS3Multipart, { data.metadata = { "sha1-checksum": file.meta.sha1_checksum }; } - return ajax(getUrl("/uploads/generate-presigned-put"), { + return ajax(getUrl(`${this.uploadRootPath}/generate-presigned-put`), { type: "POST", data, }) @@ -277,7 +278,7 @@ export default Mixin.create(UppyS3Multipart, { }, _completeExternalUpload(file) { - return ajax(getUrl("/uploads/complete-external-upload"), { + return ajax(getUrl(`${this.uploadRootPath}/complete-external-upload`), { type: "POST", data: deepMerge( { unique_identifier: file.meta.uniqueUploadIdentifier }, diff --git a/app/controllers/admin/backups_controller.rb b/app/controllers/admin/backups_controller.rb index 0ea55fe150..188db1fdcc 100644 --- 
a/app/controllers/admin/backups_controller.rb +++ b/app/controllers/admin/backups_controller.rb @@ -4,6 +4,8 @@ require "backup_restore" require "backup_restore/backup_store" class Admin::BackupsController < Admin::AdminController + include ExternalUploadHelpers + before_action :ensure_backups_enabled skip_before_action :check_xhr, only: [:index, :show, :logs, :check_backup_chunk, :upload_backup_chunk] @@ -234,4 +236,24 @@ class Admin::BackupsController < Admin::AdminController def render_error(message_key) render json: failed_json.merge(message: I18n.t(message_key)) end + + def validate_before_create_multipart(file_name:, file_size:, upload_type:) + raise ExternalUploadHelpers::ExternalUploadValidationError.new(I18n.t("backup.backup_file_should_be_tar_gz")) unless valid_extension?(file_name) + raise ExternalUploadHelpers::ExternalUploadValidationError.new(I18n.t("backup.invalid_filename")) unless valid_filename?(file_name) + end + + def self.serialize_upload(_upload) + {} # noop, the backup does not create an upload record + end + + def create_direct_multipart_upload + begin + yield + rescue BackupRestore::BackupStore::StorageError => err + message = debug_upload_error(err, I18n.t("upload.create_multipart_failure", additional_detail: err.message)) + raise ExternalUploadHelpers::ExternalUploadValidationError.new(message) + rescue BackupRestore::BackupStore::BackupFileExists + raise ExternalUploadHelpers::ExternalUploadValidationError.new(I18n.t("backup.file_exists")) + end + end end diff --git a/app/controllers/uploads_controller.rb b/app/controllers/uploads_controller.rb index c2ea7a175f..f84a0ebd0a 100644 --- a/app/controllers/uploads_controller.rb +++ b/app/controllers/uploads_controller.rb @@ -3,36 +3,17 @@ require "mini_mime" class UploadsController < ApplicationController + include ExternalUploadHelpers + requires_login except: [:show, :show_short, :show_secure] skip_before_action :preload_json, :check_xhr, :redirect_to_login_if_required, only: [:show, :show_short, :show_secure] protect_from_forgery except: :show before_action :is_asset_path, :apply_cdn_headers, only: [:show, :show_short, :show_secure] - before_action :external_store_check, only: [ - :show_secure, - :generate_presigned_put, - :complete_external_upload, - :create_multipart, - :batch_presign_multipart_parts, - :abort_multipart, - :complete_multipart - ] - before_action :direct_s3_uploads_check, only: [ - :generate_presigned_put, - :complete_external_upload, - :create_multipart, - :batch_presign_multipart_parts, - :abort_multipart, - :complete_multipart - ] - before_action :can_upload_external?, only: [:create_multipart, :generate_presigned_put] + before_action :external_store_check, only: [:show_secure] SECURE_REDIRECT_GRACE_SECONDS = 5 - PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE = 10 - CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10 - COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10 - BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE = 10 def create # capture current user for block later on @@ -208,285 +189,25 @@ class UploadsController < ApplicationController } end - def generate_presigned_put - RateLimiter.new( - current_user, "generate-presigned-put-upload-stub", PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE, 1.minute - ).performed! 
- - file_name = params.require(:file_name) - file_size = params.require(:file_size).to_i - type = params.require(:type) - - if file_size_too_big?(file_name, file_size) - return render_json_error( - I18n.t("upload.attachments.too_large_humanized", max_size: ActiveSupport::NumberHelper.number_to_human_size(SiteSetting.max_attachment_size_kb.kilobytes)), - status: 422 - ) - end - - external_upload_data = ExternalUploadManager.create_direct_upload( - current_user: current_user, - file_name: file_name, - file_size: file_size, - upload_type: type, - metadata: parse_allowed_metadata(params[:metadata]) - ) - - render json: external_upload_data - end - - def complete_external_upload - unique_identifier = params.require(:unique_identifier) - external_upload_stub = ExternalUploadStub.find_by( - unique_identifier: unique_identifier, created_by: current_user - ) - return render_404 if external_upload_stub.blank? - - complete_external_upload_via_manager(external_upload_stub) - end - - def complete_external_upload_via_manager(external_upload_stub) - opts = { - for_private_message: params[:for_private_message]&.to_s == "true", - for_site_setting: params[:for_site_setting]&.to_s == "true", - pasted: params[:pasted]&.to_s == "true", - } - - external_upload_manager = ExternalUploadManager.new(external_upload_stub, opts) - hijack do - begin - upload = external_upload_manager.transform! - - if upload.errors.empty? - response_serialized = external_upload_stub.upload_type != "backup" ? UploadsController.serialize_upload(upload) : {} - external_upload_stub.destroy! - render json: response_serialized, status: 200 - else - render_json_error(upload.errors.to_hash.values.flatten, status: 422) - end - rescue ExternalUploadManager::SizeMismatchError => err - render_json_error( - debug_upload_error(err, "upload.size_mismatch_failure", additional_detail: err.message), - status: 422 - ) - rescue ExternalUploadManager::ChecksumMismatchError => err - render_json_error( - debug_upload_error(err, "upload.checksum_mismatch_failure", additional_detail: err.message), - status: 422 - ) - rescue ExternalUploadManager::CannotPromoteError => err - render_json_error( - debug_upload_error(err, "upload.cannot_promote_failure", additional_detail: err.message), - status: 422 - ) - rescue ExternalUploadManager::DownloadFailedError, Aws::S3::Errors::NotFound => err - render_json_error( - debug_upload_error(err, "upload.download_failure", additional_detail: err.message), - status: 422 - ) - rescue => err - Discourse.warn_exception( - err, message: "Complete external upload failed unexpectedly for user #{current_user.id}" - ) - - render_json_error(I18n.t("upload.failed"), status: 422) - end - end - end - - def create_multipart - RateLimiter.new( - current_user, "create-multipart-upload", CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute - ).performed! 
- - file_name = params.require(:file_name) - file_size = params.require(:file_size).to_i - upload_type = params.require(:upload_type) - - if upload_type == "backup" - ensure_staff - return render_json_error(I18n.t("backup.backup_file_should_be_tar_gz")) unless valid_backup_extension?(file_name) - return render_json_error(I18n.t("backup.invalid_filename")) unless valid_backup_filename?(file_name) - else - if file_size_too_big?(file_name, file_size) - return render_json_error( - I18n.t("upload.attachments.too_large_humanized", max_size: ActiveSupport::NumberHelper.number_to_human_size(SiteSetting.max_attachment_size_kb.kilobytes)), - status: 422 - ) - end - end - - begin - external_upload_data = ExternalUploadManager.create_direct_multipart_upload( - current_user: current_user, - file_name: file_name, - file_size: file_size, - upload_type: upload_type, - metadata: parse_allowed_metadata(params[:metadata]) - ) - rescue Aws::S3::Errors::ServiceError => err - return render_json_error( - debug_upload_error(err, "upload.create_multipart_failure", additional_detail: err.message), - status: 422 - ) - rescue BackupRestore::BackupStore::BackupFileExists - return render_json_error(I18n.t("backup.file_exists"), status: 422) - rescue BackupRestore::BackupStore::StorageError => err - return render_json_error( - debug_upload_error(err, "upload.create_multipart_failure", additional_detail: err.message), - status: 422 - ) - end - - render json: external_upload_data - end - - def batch_presign_multipart_parts - part_numbers = params.require(:part_numbers) - unique_identifier = params.require(:unique_identifier) - - RateLimiter.new( - current_user, "batch-presign", BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE, 1.minute - ).performed! - - part_numbers = part_numbers.map do |part_number| - validate_part_number(part_number) - end - - external_upload_stub = ExternalUploadStub.find_by( - unique_identifier: unique_identifier, created_by: current_user - ) - return render_404 if external_upload_stub.blank? - - if !multipart_upload_exists?(external_upload_stub) - return render_404 - end - - store = multipart_store(external_upload_stub.upload_type) - - presigned_urls = {} - part_numbers.each do |part_number| - presigned_urls[part_number] = store.presign_multipart_part( - upload_id: external_upload_stub.external_upload_identifier, - key: external_upload_stub.key, - part_number: part_number - ) - end - - render json: { presigned_urls: presigned_urls } - end - - def validate_part_number(part_number) - part_number = part_number.to_i - if !part_number.between?(1, 10000) - raise Discourse::InvalidParameters.new( - "Each part number should be between 1 and 10000" - ) - end - part_number - end - - def multipart_upload_exists?(external_upload_stub) - store = multipart_store(external_upload_stub.upload_type) - begin - store.list_multipart_parts( - upload_id: external_upload_stub.external_upload_identifier, - key: external_upload_stub.key, - max_parts: 1 - ) - rescue Aws::S3::Errors::NoSuchUpload => err - debug_upload_error(err, "upload.external_upload_not_found", { additional_detail: "path: #{external_upload_stub.key}" }) - return false - end - true - end - - def abort_multipart - external_upload_identifier = params.require(:external_upload_identifier) - external_upload_stub = ExternalUploadStub.find_by( - external_upload_identifier: external_upload_identifier - ) - - # The stub could have already been deleted by an earlier error via - # ExternalUploadManager, so we consider this a great success if the - # stub is already gone. 
- return render json: success_json if external_upload_stub.blank? - - return render_404 if external_upload_stub.created_by_id != current_user.id - store = multipart_store(external_upload_stub.upload_type) - - begin - store.abort_multipart( - upload_id: external_upload_stub.external_upload_identifier, - key: external_upload_stub.key - ) - rescue Aws::S3::Errors::ServiceError => err - return render_json_error( - debug_upload_error(err, "upload.abort_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}"), - status: 422 - ) - end - - external_upload_stub.destroy! - - render json: success_json - end - - def complete_multipart - unique_identifier = params.require(:unique_identifier) - parts = params.require(:parts) - - RateLimiter.new( - current_user, "complete-multipart-upload", COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute - ).performed! - - external_upload_stub = ExternalUploadStub.find_by( - unique_identifier: unique_identifier, created_by: current_user - ) - return render_404 if external_upload_stub.blank? - - if !multipart_upload_exists?(external_upload_stub) - return render_404 - end - - store = multipart_store(external_upload_stub.upload_type) - parts = parts.map do |part| - part_number = part[:part_number] - etag = part[:etag] - part_number = validate_part_number(part_number) - - if etag.blank? - raise Discourse::InvalidParameters.new("All parts must have an etag and a valid part number") - end - - # this is done so it's an array of hashes rather than an array of - # ActionController::Parameters - { part_number: part_number, etag: etag } - end.sort_by do |part| - part[:part_number] - end - - begin - complete_response = store.complete_multipart( - upload_id: external_upload_stub.external_upload_identifier, - key: external_upload_stub.key, - parts: parts - ) - rescue Aws::S3::Errors::ServiceError => err - return render_json_error( - debug_upload_error(err, "upload.complete_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}"), - status: 422 - ) - end - - complete_external_upload_via_manager(external_upload_stub) - end - protected - def multipart_store(upload_type) - ensure_staff if upload_type == "backup" - ExternalUploadManager.store_for_upload_type(upload_type) + def validate_before_create_multipart(file_name:, file_size:, upload_type:) + validate_file_size(file_name: file_name, file_size: file_size) + end + + def validate_before_create_direct_upload(file_name:, file_size:, upload_type:) + validate_file_size(file_name: file_name, file_size: file_size) + end + + def validate_file_size(file_name:, file_size:) + if file_size_too_big?(file_name, file_size) + raise ExternalUploadValidationError.new( + I18n.t( + "upload.attachments.too_large_humanized", + max_size: ActiveSupport::NumberHelper.number_to_human_size(SiteSetting.max_attachment_size_kb.kilobytes) + ) + ) + end end def force_download? @@ -497,10 +218,6 @@ class UploadsController < ApplicationController raise Discourse::InvalidParameters.new("XHR not allowed") end - def render_404 - raise Discourse::NotFound - end - def self.serialize_upload(data) # as_json.as_json is not a typo... as_json in AM serializer returns keys as symbols, we need them # as strings here @@ -556,18 +273,6 @@ class UploadsController < ApplicationController private - def external_store_check - return render_404 if !Discourse.store.external? - end - - def direct_s3_uploads_check - return render_404 if !SiteSetting.enable_direct_s3_uploads - end - - def can_upload_external? 
- raise Discourse::InvalidAccess if !guardian.can_upload_external? - end - # We can pre-emptively check size for attachments, but not for images # as they may be further reduced in size by UploadCreator (at this point # they may have already been reduced in size by preprocessors) @@ -593,29 +298,12 @@ class UploadsController < ApplicationController send_file(file_path, opts) end - def debug_upload_error(err, translation_key, translation_params = {}) - return if !SiteSetting.enable_upload_debug_mode - message = I18n.t(translation_key, translation_params) - Discourse.warn_exception(err, message: message) - Rails.env.development? ? message : I18n.t("upload.failed") - end - - # don't want people posting arbitrary S3 metadata so we just take the - # one we need. all of these will be converted to x-amz-meta- metadata - # fields in S3 so it's best to use dashes in the names for consistency - # - # this metadata is baked into the presigned url and is not altered when - # sending the PUT from the clientside to the presigned url - def parse_allowed_metadata(metadata) - return if metadata.blank? - metadata.permit("sha1-checksum").to_h - end - - def valid_backup_extension?(filename) - /\.(tar\.gz|t?gz)$/i =~ filename - end - - def valid_backup_filename?(filename) - !!(/^[a-zA-Z0-9\._\-]+$/ =~ filename) + def create_direct_multipart_upload + begin + yield + rescue Aws::S3::Errors::ServiceError => err + message = debug_upload_error(err, I18n.t("upload.create_multipart_failure", additional_detail: err.message)) + raise ExternalUploadHelpers::ExternalUploadValidationError.new(message) + end end end diff --git a/config/routes.rb b/config/routes.rb index 1c52c52961..943a773fff 100644 --- a/config/routes.rb +++ b/config/routes.rb @@ -304,6 +304,12 @@ Discourse::Application.routes.draw do post "restore" => "backups#restore", constraints: { id: RouteFormat.backup } end collection do + # multipart uploads + post "create-multipart" => "backups#create_multipart", format: :json + post "complete-multipart" => "backups#complete_multipart", format: :json + post "abort-multipart" => "backups#abort_multipart", format: :json + post "batch-presign-multipart-parts" => "backups#batch_presign_multipart_parts", format: :json + get "logs" => "backups#logs" get "status" => "backups#status" delete "cancel" => "backups#cancel" diff --git a/lib/external_upload_helpers.rb b/lib/external_upload_helpers.rb new file mode 100644 index 0000000000..ff187e5de3 --- /dev/null +++ b/lib/external_upload_helpers.rb @@ -0,0 +1,347 @@ +# frozen_string_literal: true + +# Extends controllers with the methods required to do direct +# external uploads. 
+module ExternalUploadHelpers + extend ActiveSupport::Concern + + class ExternalUploadValidationError < StandardError; end + + PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE = 10 + CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10 + COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE = 10 + BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE = 10 + + included do + before_action :external_store_check, only: [ + :generate_presigned_put, + :complete_external_upload, + :create_multipart, + :batch_presign_multipart_parts, + :abort_multipart, + :complete_multipart + ] + before_action :direct_s3_uploads_check, only: [ + :generate_presigned_put, + :complete_external_upload, + :create_multipart, + :batch_presign_multipart_parts, + :abort_multipart, + :complete_multipart + ] + before_action :can_upload_external?, only: [:create_multipart, :generate_presigned_put] + end + + def generate_presigned_put + RateLimiter.new( + current_user, "generate-presigned-put-upload-stub", ExternalUploadHelpers::PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE, 1.minute + ).performed! + + file_name = params.require(:file_name) + file_size = params.require(:file_size).to_i + type = params.require(:type) + + begin + validate_before_create_direct_upload( + file_name: file_name, + file_size: file_size, + upload_type: type + ) + rescue ExternalUploadValidationError => err + return render_json_error(err.message, status: 422) + end + + external_upload_data = ExternalUploadManager.create_direct_upload( + current_user: current_user, + file_name: file_name, + file_size: file_size, + upload_type: type, + metadata: parse_allowed_metadata(params[:metadata]) + ) + + render json: external_upload_data + end + + def complete_external_upload + unique_identifier = params.require(:unique_identifier) + external_upload_stub = ExternalUploadStub.find_by( + unique_identifier: unique_identifier, created_by: current_user + ) + return render_404 if external_upload_stub.blank? + + complete_external_upload_via_manager(external_upload_stub) + end + + def create_multipart + RateLimiter.new( + current_user, "create-multipart-upload", ExternalUploadHelpers::CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute + ).performed! + + file_name = params.require(:file_name) + file_size = params.require(:file_size).to_i + upload_type = params.require(:upload_type) + + begin + validate_before_create_multipart( + file_name: file_name, + file_size: file_size, + upload_type: upload_type + ) + rescue ExternalUploadValidationError => err + return render_json_error(err.message, status: 422) + end + + begin + external_upload_data = create_direct_multipart_upload do + ExternalUploadManager.create_direct_multipart_upload( + current_user: current_user, + file_name: file_name, + file_size: file_size, + upload_type: upload_type, + metadata: parse_allowed_metadata(params[:metadata]) + ) + end + rescue ExternalUploadHelpers::ExternalUploadValidationError => err + return render_json_error(err.message, status: 422) + end + + render json: external_upload_data + end + + def batch_presign_multipart_parts + part_numbers = params.require(:part_numbers) + unique_identifier = params.require(:unique_identifier) + + RateLimiter.new( + current_user, "batch-presign", ExternalUploadHelpers::BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE, 1.minute + ).performed! + + part_numbers = part_numbers.map do |part_number| + validate_part_number(part_number) + end + + external_upload_stub = ExternalUploadStub.find_by( + unique_identifier: unique_identifier, created_by: current_user + ) + return render_404 if external_upload_stub.blank? 
+ + if !multipart_upload_exists?(external_upload_stub) + return render_404 + end + + store = multipart_store(external_upload_stub.upload_type) + + presigned_urls = {} + part_numbers.each do |part_number| + presigned_urls[part_number] = store.presign_multipart_part( + upload_id: external_upload_stub.external_upload_identifier, + key: external_upload_stub.key, + part_number: part_number + ) + end + + render json: { presigned_urls: presigned_urls } + end + + def multipart_upload_exists?(external_upload_stub) + store = multipart_store(external_upload_stub.upload_type) + begin + store.list_multipart_parts( + upload_id: external_upload_stub.external_upload_identifier, + key: external_upload_stub.key, + max_parts: 1 + ) + rescue Aws::S3::Errors::NoSuchUpload => err + debug_upload_error(err, I18n.t("upload.external_upload_not_found", additional_detail: "path: #{external_upload_stub.key}")) + return false + end + true + end + + def abort_multipart + external_upload_identifier = params.require(:external_upload_identifier) + external_upload_stub = ExternalUploadStub.find_by( + external_upload_identifier: external_upload_identifier + ) + + # The stub could have already been deleted by an earlier error via + # ExternalUploadManager, so we consider this a great success if the + # stub is already gone. + return render json: success_json if external_upload_stub.blank? + + return render_404 if external_upload_stub.created_by_id != current_user.id + store = multipart_store(external_upload_stub.upload_type) + + begin + store.abort_multipart( + upload_id: external_upload_stub.external_upload_identifier, + key: external_upload_stub.key + ) + rescue Aws::S3::Errors::ServiceError => err + return render_json_error( + debug_upload_error(err, I18n.t("upload.abort_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}")), + status: 422 + ) + end + + external_upload_stub.destroy! + + render json: success_json + end + + def complete_multipart + unique_identifier = params.require(:unique_identifier) + parts = params.require(:parts) + + RateLimiter.new( + current_user, "complete-multipart-upload", ExternalUploadHelpers::COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE, 1.minute + ).performed! + + external_upload_stub = ExternalUploadStub.find_by( + unique_identifier: unique_identifier, created_by: current_user + ) + return render_404 if external_upload_stub.blank? + + if !multipart_upload_exists?(external_upload_stub) + return render_404 + end + + store = multipart_store(external_upload_stub.upload_type) + parts = parts.map do |part| + part_number = part[:part_number] + etag = part[:etag] + part_number = validate_part_number(part_number) + + if etag.blank? 
+ raise Discourse::InvalidParameters.new("All parts must have an etag and a valid part number") + end + + # this is done so it's an array of hashes rather than an array of + # ActionController::Parameters + { part_number: part_number, etag: etag } + end.sort_by do |part| + part[:part_number] + end + + begin + complete_response = store.complete_multipart( + upload_id: external_upload_stub.external_upload_identifier, + key: external_upload_stub.key, + parts: parts + ) + rescue Aws::S3::Errors::ServiceError => err + return render_json_error( + debug_upload_error(err, I18n.t("upload.complete_multipart_failure", additional_detail: "external upload stub id: #{external_upload_stub.id}")), + status: 422 + ) + end + + complete_external_upload_via_manager(external_upload_stub) + end + + private + + def complete_external_upload_via_manager(external_upload_stub) + opts = { + for_private_message: params[:for_private_message]&.to_s == "true", + for_site_setting: params[:for_site_setting]&.to_s == "true", + pasted: params[:pasted]&.to_s == "true", + } + + external_upload_manager = ExternalUploadManager.new(external_upload_stub, opts) + hijack do + begin + upload = external_upload_manager.transform! + + if upload.errors.empty? + response_serialized = self.class.serialize_upload(upload) + external_upload_stub.destroy! + render json: response_serialized, status: 200 + else + render_json_error(upload.errors.to_hash.values.flatten, status: 422) + end + rescue ExternalUploadManager::SizeMismatchError => err + render_json_error( + debug_upload_error(err, I18n.t("upload.size_mismatch_failure", additional_detail: err.message)), + status: 422 + ) + rescue ExternalUploadManager::ChecksumMismatchError => err + render_json_error( + debug_upload_error(err, I18n.t("upload.checksum_mismatch_failure", additional_detail: err.message)), + status: 422 + ) + rescue ExternalUploadManager::CannotPromoteError => err + render_json_error( + debug_upload_error(err, I18n.t("upload.cannot_promote_failure", additional_detail: err.message)), + status: 422 + ) + rescue ExternalUploadManager::DownloadFailedError, Aws::S3::Errors::NotFound => err + render_json_error( + debug_upload_error(err, I18n.t("upload.download_failure", additional_detail: err.message)), + status: 422 + ) + rescue => err + Discourse.warn_exception( + err, message: "Complete external upload failed unexpectedly for user #{current_user.id}" + ) + + render_json_error(I18n.t("upload.failed"), status: 422) + end + end + end + + def validate_before_create_direct_upload(file_name:, file_size:, upload_type:) + # noop, should be overridden + end + + def validate_before_create_multipart(file_name:, file_size:, upload_type:) + # noop, should be overridden + end + + def validate_part_number(part_number) + part_number = part_number.to_i + if !part_number.between?(1, 10000) + raise Discourse::InvalidParameters.new( + "Each part number should be between 1 and 10000" + ) + end + part_number + end + + def debug_upload_error(err, friendly_message) + return if !SiteSetting.enable_upload_debug_mode + Discourse.warn_exception(err, message: friendly_message) + Rails.env.development? ? friendly_message : I18n.t("upload.failed") + end + + def multipart_store(upload_type) + ExternalUploadManager.store_for_upload_type(upload_type) + end + + def external_store_check + return render_404 if !Discourse.store.external? + end + + def direct_s3_uploads_check + return render_404 if !SiteSetting.enable_direct_s3_uploads + end + + def can_upload_external? 
+ raise Discourse::InvalidAccess if !guardian.can_upload_external? + end + + # don't want people posting arbitrary S3 metadata so we just take the + # one we need. all of these will be converted to x-amz-meta- metadata + # fields in S3 so it's best to use dashes in the names for consistency + # + # this metadata is baked into the presigned url and is not altered when + # sending the PUT from the clientside to the presigned url + def parse_allowed_metadata(metadata) + return if metadata.blank? + metadata.permit("sha1-checksum").to_h + end + + def render_404 + raise Discourse::NotFound + end +end diff --git a/spec/requests/admin/backups_controller_spec.rb b/spec/requests/admin/backups_controller_spec.rb index fa72233a59..6e3c06ec49 100644 --- a/spec/requests/admin/backups_controller_spec.rb +++ b/spec/requests/admin/backups_controller_spec.rb @@ -416,4 +416,120 @@ RSpec.describe Admin::BackupsController do expect(response).to be_not_found end end + + describe "S3 multipart uploads" do + let(:upload_type) { "backup" } + let(:test_bucket_prefix) { "test_#{ENV['TEST_ENV_NUMBER'].presence || '0'}" } + let(:backup_file_exists_response) { { status: 404 } } + let(:mock_multipart_upload_id) { "ibZBv_75gd9r8lH_gqXatLdxMVpAlj6CFTR.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--" } + + before do + setup_s3 + SiteSetting.enable_direct_s3_uploads = true + SiteSetting.s3_backup_bucket = "s3-backup-bucket" + SiteSetting.backup_location = BackupLocationSiteSetting::S3 + stub_request(:head, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/").to_return(status: 200, body: "", headers: {}) + stub_request(:head, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/default/test.tar.gz").to_return( + backup_file_exists_response + ) + end + + context "when the user is not admin" do + before do + admin.update(admin: false) + end + + it "errors with invalid access error" do + post "/admin/backups/create-multipart.json", params: { + file_name: "test.tar.gz", + upload_type: upload_type, + file_size: 4098 + } + expect(response.status).to eq(404) + end + end + + context "when the user is admin" do + def stub_create_multipart_backup_request + BackupRestore::S3BackupStore.any_instance.stubs(:temporary_upload_path).returns( + "temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz" + ) + create_multipart_result = <<~BODY + \n + + s3-backup-bucket + temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz + #{mock_multipart_upload_id} + + BODY + stub_request(:post, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz?uploads"). 
+ to_return(status: 200, body: create_multipart_result) + end + + it "creates the multipart upload" do + stub_create_multipart_backup_request + post "/admin/backups/create-multipart.json", params: { + file_name: "test.tar.gz", + upload_type: upload_type, + file_size: 4098 + } + expect(response.status).to eq(200) + result = response.parsed_body + + external_upload_stub = ExternalUploadStub.where( + unique_identifier: result["unique_identifier"], + original_filename: "test.tar.gz", + created_by: admin, + upload_type: upload_type, + key: result["key"], + multipart: true + ) + expect(external_upload_stub.exists?).to eq(true) + end + + context "when backup of same filename already exists" do + let(:backup_file_exists_response) { { status: 200, body: "" } } + + it "throws an error" do + post "/admin/backups/create-multipart.json", params: { + file_name: "test.tar.gz", + upload_type: upload_type, + file_size: 4098 + } + expect(response.status).to eq(422) + expect(response.parsed_body["errors"]).to include( + I18n.t("backup.file_exists") + ) + end + end + + context "when filename is invalid" do + it "throws an error" do + post "/admin/backups/create-multipart.json", params: { + file_name: "blah $$##.tar.gz", + upload_type: upload_type, + file_size: 4098 + } + expect(response.status).to eq(422) + expect(response.parsed_body["errors"]).to include( + I18n.t("backup.invalid_filename") + ) + end + end + + context "when extension is invalid" do + it "throws an error" do + post "/admin/backups/create-multipart.json", params: { + file_name: "test.png", + upload_type: upload_type, + file_size: 4098 + } + expect(response.status).to eq(422) + expect(response.parsed_body["errors"]).to include( + I18n.t("backup.backup_file_should_be_tar_gz") + ) + end + end + end + end end diff --git a/spec/requests/uploads_controller_spec.rb b/spec/requests/uploads_controller_spec.rb index 85d7df04da..4043c018c8 100644 --- a/spec/requests/uploads_controller_spec.rb +++ b/spec/requests/uploads_controller_spec.rb @@ -764,7 +764,7 @@ describe UploadsController do RateLimiter.enable RateLimiter.clear_all! - stub_const(UploadsController, "PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE", 1) do + stub_const(ExternalUploadHelpers, "PRESIGNED_PUT_RATE_LIMIT_PER_MINUTE", 1) do post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background", file_size: 1024 } post "/uploads/generate-presigned-put.json", params: { file_name: "test.png", type: "card_background", file_size: 1024 } end @@ -793,8 +793,6 @@ describe UploadsController do sign_in(user) SiteSetting.enable_direct_s3_uploads = true setup_s3 - SiteSetting.s3_backup_bucket = "s3-backup-bucket" - SiteSetting.backup_location = BackupLocationSiteSetting::S3 end it "errors if the correct params are not provided" do @@ -902,7 +900,7 @@ describe UploadsController do RateLimiter.clear_all! 
stub_create_multipart_request - stub_const(UploadsController, "CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE", 1) do + stub_const(ExternalUploadHelpers, "CREATE_MULTIPART_RATE_LIMIT_PER_MINUTE", 1) do post "/uploads/create-multipart.json", params: { file_name: "test.png", upload_type: "composer", @@ -918,88 +916,6 @@ describe UploadsController do expect(response.status).to eq(429) end end - - context "when the upload_type is backup" do - let(:upload_type) { "backup" } - let(:backup_file_exists_response) { { status: 404 } } - - before do - stub_request(:head, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/").to_return(status: 200, body: "", headers: {}) - stub_request(:head, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/default/test.tar.gz").to_return( - backup_file_exists_response - ) - end - - context "when the user is not admin" do - it "errors with invalid access error" do - post "/uploads/create-multipart.json", params: { - file_name: "test.tar.gz", - upload_type: upload_type, - file_size: 4098 - } - expect(response.status).to eq(403) - end - end - - context "when the user is admin" do - before do - user.update(admin: true) - end - - def stub_create_multipart_backup_request - BackupRestore::S3BackupStore.any_instance.stubs(:temporary_upload_path).returns( - "temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz" - ) - create_multipart_result = <<~BODY - \n - - s3-backup-bucket - temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz - #{mock_multipart_upload_id} - - BODY - stub_request(:post, "https://s3-backup-bucket.s3.us-west-1.amazonaws.com/temp/default/#{test_bucket_prefix}/28fccf8259bbe75b873a2bd2564b778c/2u98j832nx93272x947823.gz?uploads"). - to_return(status: 200, body: create_multipart_result) - end - - it "creates the multipart upload" do - stub_create_multipart_backup_request - post "/uploads/create-multipart.json", params: { - file_name: "test.tar.gz", - upload_type: upload_type, - file_size: 4098 - } - expect(response.status).to eq(200) - result = response.parsed_body - - external_upload_stub = ExternalUploadStub.where( - unique_identifier: result["unique_identifier"], - original_filename: "test.tar.gz", - created_by: user, - upload_type: upload_type, - key: result["key"], - multipart: true - ) - expect(external_upload_stub.exists?).to eq(true) - end - - context "when backup of same filename already exists" do - let(:backup_file_exists_response) { { status: 200, body: "" } } - - it "throws an error" do - post "/uploads/create-multipart.json", params: { - file_name: "test.tar.gz", - upload_type: upload_type, - file_size: 4098 - } - expect(response.status).to eq(422) - expect(response.parsed_body["errors"]).to include( - I18n.t("backup.file_exists") - ) - end - end - end - end end context "when the store is not external" do @@ -1127,7 +1043,7 @@ describe UploadsController do RateLimiter.enable RateLimiter.clear_all! - stub_const(UploadsController, "BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE", 1) do + stub_const(ExternalUploadHelpers, "BATCH_PRESIGN_RATE_LIMIT_PER_MINUTE", 1) do stub_list_multipart_request post "/uploads/batch-presign-multipart-parts.json", params: { unique_identifier: external_upload_stub.unique_identifier, @@ -1316,7 +1232,7 @@ describe UploadsController do RateLimiter.enable RateLimiter.clear_all! 
-        stub_const(UploadsController, "COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE", 1) do
+        stub_const(ExternalUploadHelpers, "COMPLETE_MULTIPART_RATE_LIMIT_PER_MINUTE", 1) do
           post "/uploads/complete-multipart.json", params: {
             unique_identifier: "blah",
             parts: [{ part_number: 1, etag: "test1" }, { part_number: 2, etag: "test2" }]

From eb82849ccbe2808d06fcc80bfce1f868cdf0ec8b Mon Sep 17 00:00:00 2001
From: Krzysztof Kotlarek
Date: Thu, 18 Nov 2021 10:21:34 +1100
Subject: [PATCH 006/168] FIX: none and all categories filter (#14999)

parentCategory is passed to the category-drop component:
https://github.com/discourse/discourse/blob/main/app/assets/javascripts/discourse/app/templates/components/bread-crumbs.hbs#L11

However, it is not available if it is not explicitly allow-listed in
selectKitOptions.

---
 .../javascripts/select-kit/addon/components/category-drop.js | 1 +
 1 file changed, 1 insertion(+)

diff --git a/app/assets/javascripts/select-kit/addon/components/category-drop.js b/app/assets/javascripts/select-kit/addon/components/category-drop.js
index 7723bdc2c1..5b19218fd6 100644
--- a/app/assets/javascripts/select-kit/addon/components/category-drop.js
+++ b/app/assets/javascripts/select-kit/addon/components/category-drop.js
@@ -38,6 +38,7 @@ export default ComboBoxComponent.extend({
     autoInsertNoneItem: false,
     displayCategoryDescription: "displayCategoryDescription",
     headerComponent: "category-drop/category-drop-header",
+    parentCategory: false,
   },

   modifyComponentForRow() {

From db24c9b94e7301e805ff3ec11562d4666809b0b8 Mon Sep 17 00:00:00 2001
From: Alan Guo Xiang Tan
Date: Thu, 18 Nov 2021 09:19:00 +0800
Subject: [PATCH 007/168] FIX: Incorrect title and chevron when filtering by post number. (#14985)

The widget's state did not reflect the state of the controller.
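Concretely: when a topic is loaded with a `?replies_to_post_number=N` filter, the topic controller knew the filter was active, but the post-contents widget booted with its own defaults, so the "show replies" button kept its unfiltered title and chevron. A rough, stand-alone sketch of the check the widget now performs on boot (plain JS; the names mirror the diff below, this is not the actual widget code):

```js
// Toy version of the state derivation: ask the controller which post the
// replies filter is scoped to instead of assuming no filter is active.
function filteredRepliesShown(topicController, attrs) {
  return topicController.replies_to_post_number === attrs.post_number.toString();
}

filteredRepliesShown({ replies_to_post_number: "3" }, { post_number: 3 }); // => true
filteredRepliesShown({ replies_to_post_number: null }, { post_number: 3 }); // => false
```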
--- .../javascripts/discourse/app/widgets/post.js | 16 ++++++++++-- .../discourse/tests/acceptance/topic-test.js | 25 +++++++++++++++++++ 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/app/assets/javascripts/discourse/app/widgets/post.js b/app/assets/javascripts/discourse/app/widgets/post.js index 3f3a0ceadc..0e6a904509 100644 --- a/app/assets/javascripts/discourse/app/widgets/post.js +++ b/app/assets/javascripts/discourse/app/widgets/post.js @@ -388,8 +388,20 @@ createWidget("post-group-request", { createWidget("post-contents", { buildKey: (attrs) => `post-contents-${attrs.id}`, - defaultState() { - return { expandedFirstPost: false, repliesBelow: [] }; + defaultState(attrs) { + const defaultState = { + expandedFirstPost: false, + repliesBelow: [], + }; + + if (this.siteSettings.enable_filtered_replies_view) { + const topicController = this.register.lookup("controller:topic"); + + defaultState.filteredRepliesShown = + topicController.replies_to_post_number === attrs.post_number.toString(); + } + + return defaultState; }, buildClasses(attrs) { diff --git a/app/assets/javascripts/discourse/tests/acceptance/topic-test.js b/app/assets/javascripts/discourse/tests/acceptance/topic-test.js index 525695f174..cbe8c5c739 100644 --- a/app/assets/javascripts/discourse/tests/acceptance/topic-test.js +++ b/app/assets/javascripts/discourse/tests/acceptance/topic-test.js @@ -546,3 +546,28 @@ acceptance("Topic last visit line", function (needs) { ); }); }); + +acceptance("Topic filter replies to post number", function (needs) { + needs.settings({ + enable_filtered_replies_view: true, + }); + + test("visit topic", async function (assert) { + await visit("/t/-/280"); + + assert.equal( + query("#post_3 .show-replies").title, + I18n.t("post.filtered_replies_hint", { count: 3 }), + "it displays the right title for filtering by replies" + ); + + await visit("/"); + await visit("/t/-/280?replies_to_post_number=3"); + + assert.equal( + query("#post_3 .show-replies").title, + I18n.t("post.view_all_posts"), + "it displays the right title when filtered by replies" + ); + }); +}); From 20f5474be96504eb94937f4be1ea6339f3fed12f Mon Sep 17 00:00:00 2001 From: Penar Musaraj Date: Wed, 17 Nov 2021 20:21:12 -0500 Subject: [PATCH 008/168] FEATURE: Log only topic/post search queries in search log (#14994) --- lib/search.rb | 8 +++++++- spec/requests/search_controller_spec.rb | 7 +++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/lib/search.rb b/lib/search.rb index def3be0fd7..6230955b6a 100644 --- a/lib/search.rb +++ b/lib/search.rb @@ -252,7 +252,7 @@ class Search # Query a term def execute(readonly_mode: Discourse.readonly_mode?) - if SiteSetting.log_search_queries? && @opts[:search_type].present? && !readonly_mode + if log_query?(readonly_mode) status, search_log_id = SearchLog.log( term: @term, search_type: @opts[:search_type], @@ -1294,4 +1294,10 @@ class Search end end + def log_query?(readonly_mode) + SiteSetting.log_search_queries? && + @opts[:search_type].present? 
&&
+      !readonly_mode &&
+      @opts[:type_filter] != "exclude_topics"
+  end
 end

diff --git a/spec/requests/search_controller_spec.rb b/spec/requests/search_controller_spec.rb
index be85a71a35..a17f7d3aeb 100644
--- a/spec/requests/search_controller_spec.rb
+++ b/spec/requests/search_controller_spec.rb
@@ -231,6 +231,13 @@ describe SearchController do
       expect(SearchLog.where(term: 'wookie')).to be_blank
     end
+    it "doesn't log when filtering by exclude_topics" do
+      SiteSetting.log_search_queries = true
+      get "/search/query.json", params: { term: 'boop', type_filter: 'exclude_topics' }
+      expect(response.status).to eq(200)
+      expect(SearchLog.where(term: 'boop')).to be_blank
+    end
+
     it "does not raise 500 with an empty term" do
       get "/search/query.json", params: { term: "in:first", type_filter: "topic", search_for_id: true }
       expect(response.status).to eq(200)

From fc1c76cfccef50ae7bb84ea0d5b7bbdf92f7ee33 Mon Sep 17 00:00:00 2001
From: Vinoth Kannan
Date: Thu, 18 Nov 2021 13:42:03 +0530
Subject: [PATCH 009/168] FIX: exclude moderator_action post for reply count in user summary. (#14991)

Previously, incorrect reply counts were displayed in the "top categories" section
of the user summary page since we included the `moderator_action` and
`small_action` post types.

Co-authored-by: Alan Guo Xiang Tan
---
 app/models/topic.rb | 5 +++--
 app/models/user_summary.rb | 31 ++++++++++++-------------------
 spec/models/user_summary_spec.rb | 15 +++++++++++++++
 3 files changed, 30 insertions(+), 21 deletions(-)

diff --git a/app/models/topic.rb b/app/models/topic.rb
index db9dd21d3c..c9713bef3e 100644
--- a/app/models/topic.rb
+++ b/app/models/topic.rb
@@ -392,9 +392,10 @@ class Topic < ActiveRecord::Base
     end
   end
-  def self.visible_post_types(viewed_by = nil)
+  def self.visible_post_types(viewed_by = nil, include_moderator_actions: true)
     types = Post.types
-    result = [types[:regular], types[:moderator_action], types[:small_action]]
+    result = [types[:regular]]
+    result += [types[:moderator_action], types[:small_action]] if include_moderator_actions
     result << types[:whisper] if viewed_by&.staff?
result end diff --git a/app/models/user_summary.rb b/app/models/user_summary.rb index 1cd0b02068..f30507117d 100644 --- a/app/models/user_summary.rb +++ b/app/models/user_summary.rb @@ -25,12 +25,7 @@ class UserSummary end def replies - Post - .joins(:topic) - .includes(:topic) - .secured(@guardian) - .merge(Topic.listable_topics.visible.secured(@guardian)) - .where(user: @user) + post_query .where('post_number > 1') .order('posts.like_count DESC, posts.created_at DESC') .limit(MAX_SUMMARY_RESULTS) @@ -88,13 +83,8 @@ class UserSummary def most_replied_to_users replied_users = {} - Post - .joins(:topic) + post_query .joins('JOIN posts replies ON posts.topic_id = replies.topic_id AND posts.reply_to_post_number = replies.post_number') - .includes(:topic) - .secured(@guardian) - .merge(Topic.listable_topics.visible.secured(@guardian)) - .where(user: @user) .where('replies.user_id <> ?', @user.id) .group('replies.user_id') .order('COUNT(*) DESC') @@ -135,13 +125,7 @@ class UserSummary end def top_categories - post_count_query = Post - .joins(:topic) - .includes(:topic) - .secured(@guardian) - .merge(Topic.listable_topics.visible.secured(@guardian)) - .where(user: @user) - .group('topics.category_id') + post_count_query = post_query.group('topics.category_id') top_categories = {} @@ -211,4 +195,13 @@ protected end.compact.sort_by { |u| -u[:count] } end + def post_query + Post + .joins(:topic) + .includes(:topic) + .where('posts.post_type IN (?)', Topic.visible_post_types(@guardian&.user, include_moderator_actions: false)) + .merge(Topic.listable_topics.visible.secured(@guardian)) + .where(user: @user) + end + end diff --git a/spec/models/user_summary_spec.rb b/spec/models/user_summary_spec.rb index 8847fc2a3a..4126632075 100644 --- a/spec/models/user_summary_spec.rb +++ b/spec/models/user_summary_spec.rb @@ -78,4 +78,19 @@ describe UserSummary do expect(summary.top_categories.length).to eq(UserSummary::MAX_SUMMARY_RESULTS) expect(summary.top_categories.first[:id]).to eq(top_category.id) end + + it "excludes moderator action posts" do + topic = create_post.topic + user = topic.user + create_post(user: user, topic: topic) + Fabricate(:small_action, topic: topic, user: user) + + summary = UserSummary.new(user, Guardian.new) + + expect(summary.topics.length).to eq(1) + expect(summary.replies.length).to eq(1) + expect(summary.top_categories.length).to eq(1) + expect(summary.top_categories.first[:topic_count]).to eq(1) + expect(summary.top_categories.first[:post_count]).to eq(1) + end end From 19627eca4ba9f106f8ed074d06b32c00c2ad092a Mon Sep 17 00:00:00 2001 From: Kris Date: Thu, 18 Nov 2021 09:46:59 -0500 Subject: [PATCH 010/168] DEV: add outlet for extra categories column (#15002) --- .../app/templates/components/categories-and-latest-topics.hbs | 2 ++ .../app/templates/components/categories-and-top-topics.hbs | 2 ++ 2 files changed, 4 insertions(+) diff --git a/app/assets/javascripts/discourse/app/templates/components/categories-and-latest-topics.hbs b/app/assets/javascripts/discourse/app/templates/components/categories-and-latest-topics.hbs index 29c22f16fa..4b6ffd510d 100644 --- a/app/assets/javascripts/discourse/app/templates/components/categories-and-latest-topics.hbs +++ b/app/assets/javascripts/discourse/app/templates/components/categories-and-latest-topics.hbs @@ -5,3 +5,5 @@
{{categories-topic-list topics=topics filter="latest" class="latest-topic-list"}}
+ +{{plugin-outlet name="extra-categories-column" tagName=""}} diff --git a/app/assets/javascripts/discourse/app/templates/components/categories-and-top-topics.hbs b/app/assets/javascripts/discourse/app/templates/components/categories-and-top-topics.hbs index ebb4a35781..66cd56ff2e 100644 --- a/app/assets/javascripts/discourse/app/templates/components/categories-and-top-topics.hbs +++ b/app/assets/javascripts/discourse/app/templates/components/categories-and-top-topics.hbs @@ -5,3 +5,5 @@
{{categories-topic-list topics=topics filter="top" class="top-topic-list"}}
+ +{{plugin-outlet name="extra-categories-column" tagName=""}} From 42fff92d9fe96bd22bcfe435238dfa0c0cde8fef Mon Sep 17 00:00:00 2001 From: Jarek Radosz Date: Thu, 18 Nov 2021 16:51:20 +0100 Subject: [PATCH 011/168] DEV: Make `store` an actual service (#14996) `store:main` was unofficially deprecated 4 years ago in https://github.com/discourse/discourse/commit/fbd5f1e4110059726118a3728141753848d10c19#diff-b19dd1d6a5c7938fda9ae317136bbbb82339946ab457c9b95af936a47276c3ddR22 --- .../javascripts/discourse/app/models/store.js | 388 +----------------- .../javascripts/discourse/app/models/topic.js | 2 +- .../inject-discourse-objects.js | 15 +- .../discourse/app/services/store.js | 384 +++++++++++++++++ .../discourse/tests/helpers/create-store.js | 2 +- .../discourse/tests/helpers/qunit-helpers.js | 2 +- .../discourse/tests/setup-tests.js | 2 +- 7 files changed, 406 insertions(+), 389 deletions(-) create mode 100644 app/assets/javascripts/discourse/app/services/store.js diff --git a/app/assets/javascripts/discourse/app/models/store.js b/app/assets/javascripts/discourse/app/models/store.js index ffac278f1c..ca01224fb8 100644 --- a/app/assets/javascripts/discourse/app/models/store.js +++ b/app/assets/javascripts/discourse/app/models/store.js @@ -1,383 +1,9 @@ -import EmberObject, { set } from "@ember/object"; -import { Promise } from "rsvp"; -import RestModel from "discourse/models/rest"; -import ResultSet from "discourse/models/result-set"; -import { ajax } from "discourse/lib/ajax"; -import { getRegister } from "discourse-common/lib/get-owner"; -import { underscore } from "@ember/string"; +import deprecated from "discourse-common/lib/deprecated"; +export { default, flushMap } from "discourse/services/store"; -let _identityMap; - -// You should only call this if you're a test scaffold -function flushMap() { - _identityMap = {}; -} - -function storeMap(type, id, obj) { - if (!id) { - return; +deprecated( + `"discourse/models/store" import is deprecated, use "discourse/services/store" instead`, + { + since: "2.8.0.beta8", } - - _identityMap[type] = _identityMap[type] || {}; - _identityMap[type][id] = obj; -} - -function fromMap(type, id) { - const byType = _identityMap[type]; - if (byType && byType.hasOwnProperty(id)) { - return byType[id]; - } -} - -function removeMap(type, id) { - const byType = _identityMap[type]; - if (byType && byType.hasOwnProperty(id)) { - delete byType[id]; - } -} - -function findAndRemoveMap(type, id) { - const byType = _identityMap[type]; - if (byType && byType.hasOwnProperty(id)) { - const result = byType[id]; - delete byType[id]; - return result; - } -} - -flushMap(); - -export default EmberObject.extend({ - _plurals: { - category: "categories", - "post-reply": "post-replies", - "post-reply-history": "post_reply_histories", - reviewable_history: "reviewable_histories", - }, - - init() { - this._super(...arguments); - this.register = this.register || getRegister(this); - }, - - pluralize(thing) { - return this._plurals[thing] || thing + "s"; - }, - - addPluralization(thing, plural) { - this._plurals[thing] = plural; - }, - - findAll(type, findArgs) { - const adapter = this.adapterFor(type); - - let store = this; - return adapter.findAll(this, type, findArgs).then((result) => { - let results = this._resultSet(type, result); - if (adapter.afterFindAll) { - results = adapter.afterFindAll(results, { - lookup(subType, id) { - return store._lookupSubType(subType, type, id, result); - }, - }); - } - return results; - }); - }, - - // Mostly for legacy, things like TopicList 
without ResultSets - findFiltered(type, findArgs) { - return this.adapterFor(type) - .find(this, type, findArgs) - .then((result) => this._build(type, result)); - }, - - _hydrateFindResults(result, type, findArgs) { - if (typeof findArgs === "object") { - return this._resultSet(type, result, findArgs); - } else { - const apiName = this.adapterFor(type).apiNameFor(type); - return this._hydrate(type, result[underscore(apiName)], result); - } - }, - - // See if the store can find stale data. We sometimes prefer to show stale data and - // refresh it in the background. - findStale(type, findArgs, opts) { - const stale = this.adapterFor(type).findStale(this, type, findArgs, opts); - return { - hasResults: stale !== undefined, - results: stale, - refresh: () => this.find(type, findArgs, opts), - }; - }, - - find(type, findArgs, opts) { - let adapter = this.adapterFor(type); - return adapter.find(this, type, findArgs, opts).then((result) => { - let hydrated = this._hydrateFindResults(result, type, findArgs, opts); - - if (result.extras) { - hydrated.set("extras", result.extras); - } - - if (adapter.cache) { - const stale = adapter.findStale(this, type, findArgs, opts); - hydrated = this._updateStale(stale, hydrated, adapter.primaryKey); - adapter.cacheFind(this, type, findArgs, opts, hydrated); - } - return hydrated; - }); - }, - - _updateStale(stale, hydrated, primaryKey) { - if (!stale) { - return hydrated; - } - - hydrated.set( - "content", - hydrated.get("content").map((item) => { - let staleItem = stale.content.findBy(primaryKey, item.get(primaryKey)); - if (staleItem) { - staleItem.setProperties(item); - } else { - staleItem = item; - } - return staleItem; - }) - ); - return hydrated; - }, - - refreshResults(resultSet, type, url) { - const adapter = this.adapterFor(type); - return ajax(url).then((result) => { - const typeName = underscore(this.pluralize(adapter.apiNameFor(type))); - const content = result[typeName].map((obj) => - this._hydrate(type, obj, result) - ); - resultSet.set("content", content); - }); - }, - - appendResults(resultSet, type, url) { - const adapter = this.adapterFor(type); - return ajax(url).then((result) => { - const typeName = underscore(this.pluralize(adapter.apiNameFor(type))); - - let pageTarget = result.meta || result; - let totalRows = - pageTarget["total_rows_" + typeName] || resultSet.get("totalRows"); - let loadMoreUrl = pageTarget["load_more_" + typeName]; - let content = result[typeName].map((obj) => - this._hydrate(type, obj, result) - ); - - resultSet.setProperties({ totalRows, loadMoreUrl }); - resultSet.get("content").pushObjects(content); - - // If we've loaded them all, clear the load more URL - if (resultSet.get("length") >= totalRows) { - resultSet.set("loadMoreUrl", null); - } - }); - }, - - update(type, id, attrs) { - const adapter = this.adapterFor(type); - return adapter.update(this, type, id, attrs, function (result) { - if (result && result[type] && result[type][adapter.primaryKey]) { - const oldRecord = findAndRemoveMap(type, id); - storeMap(type, result[type][adapter.primaryKey], oldRecord); - } - return result; - }); - }, - - createRecord(type, attrs) { - attrs = attrs || {}; - const adapter = this.adapterFor(type); - return !!attrs[adapter.primaryKey] - ? 
this._hydrate(type, attrs) - : this._build(type, attrs); - }, - - destroyRecord(type, record) { - const adapter = this.adapterFor(type); - - // If the record is new, don't perform an Ajax call - if (record.get("isNew")) { - removeMap(type, record.get(adapter.primaryKey)); - return Promise.resolve(true); - } - - return adapter.destroyRecord(this, type, record).then(function (result) { - removeMap(type, record.get(adapter.primaryKey)); - return result; - }); - }, - - _resultSet(type, result, findArgs) { - const adapter = this.adapterFor(type); - const typeName = underscore(this.pluralize(adapter.apiNameFor(type))); - - if (!result[typeName]) { - // eslint-disable-next-line no-console - console.error(`JSON response is missing \`${typeName}\` key`, result); - return; - } - - const content = result[typeName].map((obj) => - this._hydrate(type, obj, result) - ); - - let pageTarget = result.meta || result; - - const createArgs = { - content, - findArgs, - totalRows: pageTarget["total_rows_" + typeName] || content.length, - loadMoreUrl: pageTarget["load_more_" + typeName], - refreshUrl: pageTarget["refresh_" + typeName], - resultSetMeta: result.meta, - store: this, - __type: type, - }; - - if (result.extras) { - createArgs.extras = result.extras; - } - - return ResultSet.create(createArgs); - }, - - _build(type, obj) { - const adapter = this.adapterFor(type); - obj.store = this; - obj.__type = type; - obj.__state = obj[adapter.primaryKey] ? "created" : "new"; - - // TODO: Have injections be automatic - obj.topicTrackingState = this.register.lookup("topic-tracking-state:main"); - obj.keyValueStore = this.register.lookup("key-value-store:main"); - - const klass = this.register.lookupFactory("model:" + type) || RestModel; - const model = klass.create(obj); - - storeMap(type, obj[adapter.primaryKey], model); - return model; - }, - - adapterFor(type) { - return ( - this.register.lookup("adapter:" + type) || - this.register.lookup("adapter:rest") - ); - }, - - _lookupSubType(subType, type, id, root) { - if (root.meta && root.meta.types) { - subType = root.meta.types[subType] || subType; - } - - const subTypeAdapter = this.adapterFor(subType); - const pluralType = this.pluralize(subType); - const collection = root[this.pluralize(subType)]; - if (collection) { - const hashedProp = "__hashed_" + pluralType; - let hashedCollection = root[hashedProp]; - if (!hashedCollection) { - hashedCollection = {}; - collection.forEach(function (it) { - hashedCollection[it[subTypeAdapter.primaryKey]] = it; - }); - root[hashedProp] = hashedCollection; - } - - const found = hashedCollection[id]; - if (found) { - const hydrated = this._hydrate(subType, found, root); - hashedCollection[id] = hydrated; - return hydrated; - } - } - }, - - _hydrateEmbedded(type, obj, root) { - const adapter = this.adapterFor(type); - Object.keys(obj).forEach((k) => { - if (k === adapter.primaryKey) { - return; - } - - const m = /(.+)\_id(s?)$/.exec(k); - if (m) { - const subType = m[1]; - - if (m[2]) { - const hydrated = obj[k].map((id) => - this._lookupSubType(subType, type, id, root) - ); - obj[this.pluralize(subType)] = hydrated || []; - delete obj[k]; - } else { - const hydrated = this._lookupSubType(subType, type, obj[k], root); - if (hydrated) { - obj[subType] = hydrated; - delete obj[k]; - } else { - set(obj, subType, null); - } - } - } - }); - }, - - _hydrate(type, obj, root) { - if (!obj) { - throw new Error("Can't hydrate " + type + " of `null`"); - } - - const adapter = this.adapterFor(type); - - const id = obj[adapter.primaryKey]; - 
if (!id) { - throw new Error( - `Can't hydrate ${type} without primaryKey: \`${adapter.primaryKey}\`` - ); - } - - root = root || obj; - - if (root.__rest_serializer === "1") { - this._hydrateEmbedded(type, obj, root); - } - - const existing = fromMap(type, id); - if (existing === obj) { - return existing; - } - - if (existing) { - delete obj[adapter.primaryKey]; - let klass = this.register.lookupFactory("model:" + type); - - if (klass && klass.class) { - klass = klass.class; - } - - if (!klass) { - klass = RestModel; - } - - existing.setProperties(klass.munge(obj)); - obj[adapter.primaryKey] = id; - return existing; - } - - return this._build(type, obj); - }, -}); - -export { flushMap }; +); diff --git a/app/assets/javascripts/discourse/app/models/topic.js b/app/assets/javascripts/discourse/app/models/topic.js index 6883581b50..d5ed11eb76 100644 --- a/app/assets/javascripts/discourse/app/models/topic.js +++ b/app/assets/javascripts/discourse/app/models/topic.js @@ -15,7 +15,7 @@ import { deepMerge } from "discourse-common/lib/object"; import discourseComputed from "discourse-common/utils/decorators"; import { emojiUnescape } from "discourse/lib/text"; import { fancyTitle } from "discourse/lib/topic-fancy-title"; -import { flushMap } from "discourse/models/store"; +import { flushMap } from "discourse/services/store"; import getURL from "discourse-common/lib/get-url"; import { longDate } from "discourse/lib/formatter"; import { popupAjaxError } from "discourse/lib/ajax-error"; diff --git a/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js b/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js index 5fa2e380ba..e60955dd3f 100644 --- a/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js +++ b/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js @@ -8,8 +8,8 @@ import MessageBus from "message-bus-client"; import SearchService from "discourse/services/search"; import Session from "discourse/models/session"; import Site from "discourse/models/site"; -import Store from "discourse/models/store"; import User from "discourse/models/user"; +import deprecated from "discourse-common/lib/deprecated"; const ALL_TARGETS = ["controller", "component", "route", "model", "adapter"]; @@ -20,9 +20,6 @@ export function registerObjects(app) { } app.__registeredObjects__ = true; - app.register("store:main", Store); - app.register("service:store", Store); - // TODO: This should be included properly app.register("message-bus:main", MessageBus, { instantiate: false }); @@ -37,6 +34,16 @@ export default { initialize(container, app) { registerObjects(app); + app.register("store:main", { + create() { + deprecated(`"store:main" is deprecated, use "service:store" instead`, { + since: "2.8.0.beta8", + }); + + return container.lookup("service:store"); + }, + }); + let siteSettings = container.lookup("site-settings:main"); const currentUser = User.current(); diff --git a/app/assets/javascripts/discourse/app/services/store.js b/app/assets/javascripts/discourse/app/services/store.js new file mode 100644 index 0000000000..4eaf4b5211 --- /dev/null +++ b/app/assets/javascripts/discourse/app/services/store.js @@ -0,0 +1,384 @@ +import Service from "@ember/service"; +import { set } from "@ember/object"; +import { Promise } from "rsvp"; +import RestModel from "discourse/models/rest"; +import ResultSet from "discourse/models/result-set"; +import { ajax } from "discourse/lib/ajax"; +import { getRegister } from 
"discourse-common/lib/get-owner"; +import { underscore } from "@ember/string"; + +let _identityMap; + +// You should only call this if you're a test scaffold +function flushMap() { + _identityMap = {}; +} + +function storeMap(type, id, obj) { + if (!id) { + return; + } + + _identityMap[type] = _identityMap[type] || {}; + _identityMap[type][id] = obj; +} + +function fromMap(type, id) { + const byType = _identityMap[type]; + if (byType && byType.hasOwnProperty(id)) { + return byType[id]; + } +} + +function removeMap(type, id) { + const byType = _identityMap[type]; + if (byType && byType.hasOwnProperty(id)) { + delete byType[id]; + } +} + +function findAndRemoveMap(type, id) { + const byType = _identityMap[type]; + if (byType && byType.hasOwnProperty(id)) { + const result = byType[id]; + delete byType[id]; + return result; + } +} + +flushMap(); + +export default Service.extend({ + _plurals: { + category: "categories", + "post-reply": "post-replies", + "post-reply-history": "post_reply_histories", + reviewable_history: "reviewable_histories", + }, + + init() { + this._super(...arguments); + this.register = this.register || getRegister(this); + }, + + pluralize(thing) { + return this._plurals[thing] || thing + "s"; + }, + + addPluralization(thing, plural) { + this._plurals[thing] = plural; + }, + + findAll(type, findArgs) { + const adapter = this.adapterFor(type); + + let store = this; + return adapter.findAll(this, type, findArgs).then((result) => { + let results = this._resultSet(type, result); + if (adapter.afterFindAll) { + results = adapter.afterFindAll(results, { + lookup(subType, id) { + return store._lookupSubType(subType, type, id, result); + }, + }); + } + return results; + }); + }, + + // Mostly for legacy, things like TopicList without ResultSets + findFiltered(type, findArgs) { + return this.adapterFor(type) + .find(this, type, findArgs) + .then((result) => this._build(type, result)); + }, + + _hydrateFindResults(result, type, findArgs) { + if (typeof findArgs === "object") { + return this._resultSet(type, result, findArgs); + } else { + const apiName = this.adapterFor(type).apiNameFor(type); + return this._hydrate(type, result[underscore(apiName)], result); + } + }, + + // See if the store can find stale data. We sometimes prefer to show stale data and + // refresh it in the background. 
+ findStale(type, findArgs, opts) { + const stale = this.adapterFor(type).findStale(this, type, findArgs, opts); + return { + hasResults: stale !== undefined, + results: stale, + refresh: () => this.find(type, findArgs, opts), + }; + }, + + find(type, findArgs, opts) { + let adapter = this.adapterFor(type); + return adapter.find(this, type, findArgs, opts).then((result) => { + let hydrated = this._hydrateFindResults(result, type, findArgs, opts); + + if (result.extras) { + hydrated.set("extras", result.extras); + } + + if (adapter.cache) { + const stale = adapter.findStale(this, type, findArgs, opts); + hydrated = this._updateStale(stale, hydrated, adapter.primaryKey); + adapter.cacheFind(this, type, findArgs, opts, hydrated); + } + return hydrated; + }); + }, + + _updateStale(stale, hydrated, primaryKey) { + if (!stale) { + return hydrated; + } + + hydrated.set( + "content", + hydrated.get("content").map((item) => { + let staleItem = stale.content.findBy(primaryKey, item.get(primaryKey)); + if (staleItem) { + staleItem.setProperties(item); + } else { + staleItem = item; + } + return staleItem; + }) + ); + return hydrated; + }, + + refreshResults(resultSet, type, url) { + const adapter = this.adapterFor(type); + return ajax(url).then((result) => { + const typeName = underscore(this.pluralize(adapter.apiNameFor(type))); + const content = result[typeName].map((obj) => + this._hydrate(type, obj, result) + ); + resultSet.set("content", content); + }); + }, + + appendResults(resultSet, type, url) { + const adapter = this.adapterFor(type); + return ajax(url).then((result) => { + const typeName = underscore(this.pluralize(adapter.apiNameFor(type))); + + let pageTarget = result.meta || result; + let totalRows = + pageTarget["total_rows_" + typeName] || resultSet.get("totalRows"); + let loadMoreUrl = pageTarget["load_more_" + typeName]; + let content = result[typeName].map((obj) => + this._hydrate(type, obj, result) + ); + + resultSet.setProperties({ totalRows, loadMoreUrl }); + resultSet.get("content").pushObjects(content); + + // If we've loaded them all, clear the load more URL + if (resultSet.get("length") >= totalRows) { + resultSet.set("loadMoreUrl", null); + } + }); + }, + + update(type, id, attrs) { + const adapter = this.adapterFor(type); + return adapter.update(this, type, id, attrs, function (result) { + if (result && result[type] && result[type][adapter.primaryKey]) { + const oldRecord = findAndRemoveMap(type, id); + storeMap(type, result[type][adapter.primaryKey], oldRecord); + } + return result; + }); + }, + + createRecord(type, attrs) { + attrs = attrs || {}; + const adapter = this.adapterFor(type); + return !!attrs[adapter.primaryKey] + ? 
this._hydrate(type, attrs) + : this._build(type, attrs); + }, + + destroyRecord(type, record) { + const adapter = this.adapterFor(type); + + // If the record is new, don't perform an Ajax call + if (record.get("isNew")) { + removeMap(type, record.get(adapter.primaryKey)); + return Promise.resolve(true); + } + + return adapter.destroyRecord(this, type, record).then(function (result) { + removeMap(type, record.get(adapter.primaryKey)); + return result; + }); + }, + + _resultSet(type, result, findArgs) { + const adapter = this.adapterFor(type); + const typeName = underscore(this.pluralize(adapter.apiNameFor(type))); + + if (!result[typeName]) { + // eslint-disable-next-line no-console + console.error(`JSON response is missing \`${typeName}\` key`, result); + return; + } + + const content = result[typeName].map((obj) => + this._hydrate(type, obj, result) + ); + + let pageTarget = result.meta || result; + + const createArgs = { + content, + findArgs, + totalRows: pageTarget["total_rows_" + typeName] || content.length, + loadMoreUrl: pageTarget["load_more_" + typeName], + refreshUrl: pageTarget["refresh_" + typeName], + resultSetMeta: result.meta, + store: this, + __type: type, + }; + + if (result.extras) { + createArgs.extras = result.extras; + } + + return ResultSet.create(createArgs); + }, + + _build(type, obj) { + const adapter = this.adapterFor(type); + obj.store = this; + obj.__type = type; + obj.__state = obj[adapter.primaryKey] ? "created" : "new"; + + // TODO: Have injections be automatic + obj.topicTrackingState = this.register.lookup("topic-tracking-state:main"); + obj.keyValueStore = this.register.lookup("key-value-store:main"); + + const klass = this.register.lookupFactory("model:" + type) || RestModel; + const model = klass.create(obj); + + storeMap(type, obj[adapter.primaryKey], model); + return model; + }, + + adapterFor(type) { + return ( + this.register.lookup("adapter:" + type) || + this.register.lookup("adapter:rest") + ); + }, + + _lookupSubType(subType, type, id, root) { + if (root.meta && root.meta.types) { + subType = root.meta.types[subType] || subType; + } + + const subTypeAdapter = this.adapterFor(subType); + const pluralType = this.pluralize(subType); + const collection = root[this.pluralize(subType)]; + if (collection) { + const hashedProp = "__hashed_" + pluralType; + let hashedCollection = root[hashedProp]; + if (!hashedCollection) { + hashedCollection = {}; + collection.forEach(function (it) { + hashedCollection[it[subTypeAdapter.primaryKey]] = it; + }); + root[hashedProp] = hashedCollection; + } + + const found = hashedCollection[id]; + if (found) { + const hydrated = this._hydrate(subType, found, root); + hashedCollection[id] = hydrated; + return hydrated; + } + } + }, + + _hydrateEmbedded(type, obj, root) { + const adapter = this.adapterFor(type); + Object.keys(obj).forEach((k) => { + if (k === adapter.primaryKey) { + return; + } + + const m = /(.+)\_id(s?)$/.exec(k); + if (m) { + const subType = m[1]; + + if (m[2]) { + const hydrated = obj[k].map((id) => + this._lookupSubType(subType, type, id, root) + ); + obj[this.pluralize(subType)] = hydrated || []; + delete obj[k]; + } else { + const hydrated = this._lookupSubType(subType, type, obj[k], root); + if (hydrated) { + obj[subType] = hydrated; + delete obj[k]; + } else { + set(obj, subType, null); + } + } + } + }); + }, + + _hydrate(type, obj, root) { + if (!obj) { + throw new Error("Can't hydrate " + type + " of `null`"); + } + + const adapter = this.adapterFor(type); + + const id = obj[adapter.primaryKey]; + 
if (!id) { + throw new Error( + `Can't hydrate ${type} without primaryKey: \`${adapter.primaryKey}\`` + ); + } + + root = root || obj; + + if (root.__rest_serializer === "1") { + this._hydrateEmbedded(type, obj, root); + } + + const existing = fromMap(type, id); + if (existing === obj) { + return existing; + } + + if (existing) { + delete obj[adapter.primaryKey]; + let klass = this.register.lookupFactory("model:" + type); + + if (klass && klass.class) { + klass = klass.class; + } + + if (!klass) { + klass = RestModel; + } + + existing.setProperties(klass.munge(obj)); + obj[adapter.primaryKey] = id; + return existing; + } + + return this._build(type, obj); + }, +}); + +export { flushMap }; diff --git a/app/assets/javascripts/discourse/tests/helpers/create-store.js b/app/assets/javascripts/discourse/tests/helpers/create-store.js index ad15ffd4fa..93a8520d6f 100644 --- a/app/assets/javascripts/discourse/tests/helpers/create-store.js +++ b/app/assets/javascripts/discourse/tests/helpers/create-store.js @@ -1,6 +1,6 @@ import KeyValueStore from "discourse/lib/key-value-store"; import RestAdapter from "discourse/adapters/rest"; -import Store from "discourse/models/store"; +import Store from "discourse/services/store"; import TopicListAdapter from "discourse/adapters/topic-list"; import TopicTrackingState from "discourse/models/topic-tracking-state"; import { buildResolver } from "discourse-common/resolver"; diff --git a/app/assets/javascripts/discourse/tests/helpers/qunit-helpers.js b/app/assets/javascripts/discourse/tests/helpers/qunit-helpers.js index 439751b7e2..84e9aa5aa0 100644 --- a/app/assets/javascripts/discourse/tests/helpers/qunit-helpers.js +++ b/app/assets/javascripts/discourse/tests/helpers/qunit-helpers.js @@ -21,7 +21,7 @@ import { _clearSnapshots } from "select-kit/components/composer-actions"; import { clearHTMLCache } from "discourse/helpers/custom-html"; import createStore from "discourse/tests/helpers/create-store"; import deprecated from "discourse-common/lib/deprecated"; -import { flushMap } from "discourse/models/store"; +import { flushMap } from "discourse/services/store"; import { initSearchData } from "discourse/widgets/search-menu"; import { resetPostMenuExtraButtons } from "discourse/widgets/post-menu"; import { isEmpty } from "@ember/utils"; diff --git a/app/assets/javascripts/discourse/tests/setup-tests.js b/app/assets/javascripts/discourse/tests/setup-tests.js index ec7f80b578..7091b47143 100644 --- a/app/assets/javascripts/discourse/tests/setup-tests.js +++ b/app/assets/javascripts/discourse/tests/setup-tests.js @@ -27,7 +27,7 @@ import bootbox from "bootbox"; import { buildResolver } from "discourse-common/resolver"; import { createHelperContext } from "discourse-common/lib/helpers"; import deprecated from "discourse-common/lib/deprecated"; -import { flushMap } from "discourse/models/store"; +import { flushMap } from "discourse/services/store"; import { registerObjects } from "discourse/pre-initializers/inject-discourse-objects"; import sinon from "sinon"; import { run } from "@ember/runloop"; From 40218426289932d18da84f717d1e7226371763ec Mon Sep 17 00:00:00 2001 From: Jarek Radosz Date: Thu, 18 Nov 2021 16:52:03 +0100 Subject: [PATCH 012/168] DEV: Make `search` an actual service (#14998) --- .../discourse/app/lib/keyboard-shortcuts.js | 2 +- .../inject-discourse-objects.js | 17 ++++++++++++++--- .../discourse/app/services/search.js | 17 +++++++---------- .../javascripts/discourse/app/widgets/header.js | 8 +++----- .../app/widgets/search-menu-results.js | 5 ++--- 
.../discourse/app/widgets/search-menu.js | 12 +++--------- 6 files changed, 30 insertions(+), 31 deletions(-) diff --git a/app/assets/javascripts/discourse/app/lib/keyboard-shortcuts.js b/app/assets/javascripts/discourse/app/lib/keyboard-shortcuts.js index b97787fed5..93efb02416 100644 --- a/app/assets/javascripts/discourse/app/lib/keyboard-shortcuts.js +++ b/app/assets/javascripts/discourse/app/lib/keyboard-shortcuts.js @@ -104,7 +104,7 @@ export default { this.container = container; this._stopCallback(); - this.searchService = this.container.lookup("search-service:main"); + this.searchService = this.container.lookup("service:search"); this.appEvents = this.container.lookup("service:app-events"); this.currentUser = this.container.lookup("current-user:main"); this.siteSettings = this.container.lookup("site-settings:main"); diff --git a/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js b/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js index e60955dd3f..2a92f321fb 100644 --- a/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js +++ b/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js @@ -5,7 +5,6 @@ import PrivateMessageTopicTrackingState from "discourse/models/private-message-t import DiscourseLocation from "discourse/lib/discourse-location"; import KeyValueStore from "discourse/lib/key-value-store"; import MessageBus from "message-bus-client"; -import SearchService from "discourse/services/search"; import Session from "discourse/models/session"; import Site from "discourse/models/site"; import User from "discourse/models/user"; @@ -79,14 +78,26 @@ export default { const keyValueStore = new KeyValueStore("discourse_"); app.register("key-value-store:main", keyValueStore, { instantiate: false }); - app.register("search-service:main", SearchService); + + app.register("search-service:main", { + create() { + deprecated( + `"search-service:main" is deprecated, use "service:search" instead`, + { + since: "2.8.0.beta8", + } + ); + + return container.lookup("service:search"); + }, + }); ALL_TARGETS.forEach((t) => { app.inject(t, "appEvents", "service:app-events"); app.inject(t, "pmTopicTrackingState", "pm-topic-tracking-state:main"); app.inject(t, "store", "service:store"); app.inject(t, "site", "site:main"); - app.inject(t, "searchService", "search-service:main"); + app.inject(t, "searchService", "service:search"); app.inject(t, "keyValueStore", "key-value-store:main"); }); diff --git a/app/assets/javascripts/discourse/app/services/search.js b/app/assets/javascripts/discourse/app/services/search.js index a702e9980b..2fe2dacaec 100644 --- a/app/assets/javascripts/discourse/app/services/search.js +++ b/app/assets/javascripts/discourse/app/services/search.js @@ -1,7 +1,7 @@ -import EmberObject, { get } from "@ember/object"; +import Service from "@ember/service"; import discourseComputed from "discourse-common/utils/decorators"; -export default EmberObject.extend({ +export default Service.extend({ searchContextEnabled: false, // checkbox to scope search searchContext: null, highlightTerm: null, @@ -9,16 +9,13 @@ export default EmberObject.extend({ @discourseComputed("searchContext") contextType: { get(searchContext) { - if (searchContext) { - return get(searchContext, "type"); - } + return searchContext?.type; }, + set(value, searchContext) { - // a bit hacky, consider cleaning this up, need to work through all observers though - const context = Object.assign({}, 
searchContext); - context.type = value; - this.set("searchContext", context); - return this.get("searchContext.type"); + this.set("searchContext", { ...searchContext, type: value }); + + return value; }, }, }); diff --git a/app/assets/javascripts/discourse/app/widgets/header.js b/app/assets/javascripts/discourse/app/widgets/header.js index 9cfd4091f6..377e76304d 100644 --- a/app/assets/javascripts/discourse/app/widgets/header.js +++ b/app/assets/javascripts/discourse/app/widgets/header.js @@ -328,7 +328,7 @@ export function attachAdditionalPanel(name, toggle, transformAttrs) { export default createWidget("header", { tagName: "header.d-header.clearfix", buildKey: () => `header`, - services: ["router"], + services: ["router", "search"], defaultState() { let states = { @@ -408,8 +408,7 @@ export default createWidget("header", { updateHighlight() { if (!this.state.searchVisible) { - const service = this.register.lookup("search-service:main"); - service.set("highlightTerm", ""); + this.search.set("highlightTerm", ""); } }, @@ -447,8 +446,7 @@ export default createWidget("header", { toggleSearchMenu() { if (this.site.mobileView) { - const searchService = this.register.lookup("search-service:main"); - const context = searchService.get("searchContext"); + const context = this.search.searchContext; let params = ""; if (context) { diff --git a/app/assets/javascripts/discourse/app/widgets/search-menu-results.js b/app/assets/javascripts/discourse/app/widgets/search-menu-results.js index f3f0b7af32..d607ad3de5 100644 --- a/app/assets/javascripts/discourse/app/widgets/search-menu-results.js +++ b/app/assets/javascripts/discourse/app/widgets/search-menu-results.js @@ -498,15 +498,14 @@ createWidget("search-menu-assistant", { createWidget("search-menu-initial-options", { tagName: "ul.search-menu-initial-options", + services: ["search"], html(attrs) { if (attrs.term?.match(MODIFIER_REGEXP)) { return this.defaultRow(attrs.term); } - const service = this.register.lookup("search-service:main"); - const ctx = service.get("searchContext"); - + const ctx = this.search.searchContext; const content = []; if (attrs.term || ctx) { diff --git a/app/assets/javascripts/discourse/app/widgets/search-menu.js b/app/assets/javascripts/discourse/app/widgets/search-menu.js index e5366f5be6..4b52448038 100644 --- a/app/assets/javascripts/discourse/app/widgets/search-menu.js +++ b/app/assets/javascripts/discourse/app/widgets/search-menu.js @@ -185,6 +185,7 @@ const SearchHelper = { export default createWidget("search-menu", { tagName: "div.search-menu", + services: ["search"], searchData, buildKey: () => "search-menu", @@ -306,13 +307,6 @@ export default createWidget("search-menu", { this.triggerSearch(); }, - searchService() { - if (!this._searchService) { - this._searchService = this.register.lookup("search-service:main"); - } - return this._searchService; - }, - html(attrs, state) { if (attrs.inTopicContext === false) { state.inTopicContext = false; @@ -450,7 +444,7 @@ export default createWidget("search-menu", { searchData.noResults = false; if (SearchHelper.includesTopics()) { if (this.state.inTopicContext) { - this.searchService().set("highlightTerm", searchData.term); + this.search.set("highlightTerm", searchData.term); } searchData.loading = true; @@ -500,7 +494,7 @@ export default createWidget("search-menu", { searchContext() { if (this.state.inTopicContext) { - return this.searchService().get("searchContext"); + return this.search.searchContext; } return false; From 82c72a46d1da20360235aed3f42b84119dd8b4cf Mon Sep 17 
00:00:00 2001 From: Jarek Radosz Date: Thu, 18 Nov 2021 16:52:14 +0100 Subject: [PATCH 013/168] DEV: Fix `castInteger` deprecations (#15004) --- .../components/groups-form-interaction-fields.hbs | 4 +++- .../discourse/app/templates/full-page-search.hbs | 8 ++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/app/assets/javascripts/discourse/app/templates/components/groups-form-interaction-fields.hbs b/app/assets/javascripts/discourse/app/templates/components/groups-form-interaction-fields.hbs index 7681b30a85..ad186711ea 100644 --- a/app/assets/javascripts/discourse/app/templates/components/groups-form-interaction-fields.hbs +++ b/app/assets/javascripts/discourse/app/templates/components/groups-form-interaction-fields.hbs @@ -8,9 +8,11 @@ valueProperty="value" value=model.visibility_level content=visibilityLevelOptions - castInteger=true class="groups-form-visibility-level" onChange=(action (mut model.visibility_level)) + options=(hash + castInteger=true + ) }}
diff --git a/app/assets/javascripts/discourse/app/templates/full-page-search.hbs b/app/assets/javascripts/discourse/app/templates/full-page-search.hbs index 4c9eee8c2a..795d559a1a 100644 --- a/app/assets/javascripts/discourse/app/templates/full-page-search.hbs +++ b/app/assets/javascripts/discourse/app/templates/full-page-search.hbs @@ -24,8 +24,10 @@ id="search-type" value=search_type content=searchTypes - castInteger=true onChange=(action (mut search_type)) + options=(hash + castInteger=true + ) }} {{d-button action=(action "search" (hash collapseFilters=true)) @@ -90,9 +92,11 @@ {{combo-box value=sortOrder content=sortOrders - castInteger=true onChange=(action (mut sortOrder)) id="search-sort-by" + options=(hash + castInteger=true + ) }}
From 8dc9e0f4bd172e91a343836768af96e2471ff38d Mon Sep 17 00:00:00 2001 From: Jarek Radosz Date: Thu, 18 Nov 2021 16:54:47 +0100 Subject: [PATCH 014/168] DEV: Use `class` syntax in key-value-store (#15005) --- .../discourse/app/lib/key-value-store.js | 35 +++++++++++-------- .../tests/unit/lib/key-value-store-test.js | 27 +++++++++++--- 2 files changed, 44 insertions(+), 18 deletions(-) diff --git a/app/assets/javascripts/discourse/app/lib/key-value-store.js b/app/assets/javascripts/discourse/app/lib/key-value-store.js index 94c67018b9..1b67bf52a3 100644 --- a/app/assets/javascripts/discourse/app/lib/key-value-store.js +++ b/app/assets/javascripts/discourse/app/lib/key-value-store.js @@ -14,11 +14,13 @@ try { safeLocalStorage = null; } -const KeyValueStore = function (ctx) { - this.context = ctx; -}; +export default class KeyValueStore { + context = null; + + constructor(ctx) { + this.context = ctx; + } -KeyValueStore.prototype = { abandonLocal() { if (!safeLocalStorage) { return; @@ -32,57 +34,64 @@ KeyValueStore.prototype = { } i--; } + return true; - }, + } remove(key) { if (!safeLocalStorage) { return; } + return safeLocalStorage.removeItem(this.context + key); - }, + } set(opts) { if (!safeLocalStorage) { return false; } + safeLocalStorage[this.context + opts.key] = opts.value; - }, + } setObject(opts) { this.set({ key: opts.key, value: JSON.stringify(opts.value) }); - }, + } get(key) { if (!safeLocalStorage) { return null; } return safeLocalStorage[this.context + key]; - }, + } getInt(key, def) { if (!def) { def = 0; } + if (!safeLocalStorage) { return def; } + const result = parseInt(this.get(key), 10); if (!isFinite(result)) { return def; } + return result; - }, + } getObject(key) { if (!safeLocalStorage) { return null; } + try { return JSON.parse(safeLocalStorage[this.context + key]); } catch (e) {} - }, -}; + } +} // API compatibility with `localStorage` KeyValueStore.prototype.getItem = KeyValueStore.prototype.get; @@ -90,5 +99,3 @@ KeyValueStore.prototype.removeItem = KeyValueStore.prototype.remove; KeyValueStore.prototype.setItem = function (key, value) { this.set({ key, value }); }; - -export default KeyValueStore; diff --git a/app/assets/javascripts/discourse/tests/unit/lib/key-value-store-test.js b/app/assets/javascripts/discourse/tests/unit/lib/key-value-store-test.js index 243dc9a981..ed323d85b9 100644 --- a/app/assets/javascripts/discourse/tests/unit/lib/key-value-store-test.js +++ b/app/assets/javascripts/discourse/tests/unit/lib/key-value-store-test.js @@ -2,18 +2,37 @@ import { module, test } from "qunit"; import KeyValueStore from "discourse/lib/key-value-store"; module("Unit | Utility | key-value-store", function () { - test("it's able to get the result back from the store", function (assert) { - const store = new KeyValueStore("_test"); + test("is able to get the result back from the store", function (assert) { + const store = new KeyValueStore("test_"); store.set({ key: "bob", value: "uncle" }); + assert.strictEqual(store.get("bob"), "uncle"); }); + test("is able remove items from the store", function (assert) { + const store = new KeyValueStore("test_"); + store.set({ key: "bob", value: "uncle" }); + store.remove("bob"); + + assert.strictEqual(store.get("bob"), undefined); + }); + test("is able to nuke the store", function (assert) { - const store = new KeyValueStore("_test"); + const store = new KeyValueStore("test_"); store.set({ key: "bob1", value: "uncle" }); store.abandonLocal(); localStorage.a = 1; - assert.strictEqual(store.get("bob1"), void 0); + + 
assert.strictEqual(store.get("bob1"), undefined); assert.strictEqual(localStorage.a, "1"); }); + + test("is API-compatible with `localStorage`", function (assert) { + const store = new KeyValueStore("test_"); + store.setItem("bob", "uncle"); + assert.strictEqual(store.getItem("bob"), "uncle"); + + store.removeItem("bob"); + assert.strictEqual(store.getItem("bob"), undefined); + }); }); From a10267352220837c0298d84d399bb1c0b8874562 Mon Sep 17 00:00:00 2001 From: Jarek Radosz Date: Thu, 18 Nov 2021 17:11:59 +0100 Subject: [PATCH 015/168] DEV: Avoid unnecessary `site-settings:main` lookups (#15006) --- .../admin/addon/services/admin-tools.js | 14 +++----------- .../discourse/app/lib/keyboard-shortcuts.js | 4 +--- 2 files changed, 4 insertions(+), 14 deletions(-) diff --git a/app/assets/javascripts/admin/addon/services/admin-tools.js b/app/assets/javascripts/admin/addon/services/admin-tools.js index fe282fefd6..a90df7de8e 100644 --- a/app/assets/javascripts/admin/addon/services/admin-tools.js +++ b/app/assets/javascripts/admin/addon/services/admin-tools.js @@ -1,8 +1,4 @@ import AdminUser from "admin/models/admin-user"; -// A service that can act as a bridge between the front end Discourse application -// and the admin application. Use this if you need front end code to access admin -// modules. Inject it optionally, and if it exists go to town! - import I18n from "I18n"; import { Promise } from "rsvp"; import Service from "@ember/service"; @@ -12,14 +8,10 @@ import { getOwner } from "discourse-common/lib/get-owner"; import { iconHTML } from "discourse-common/lib/icon-library"; import showModal from "discourse/lib/show-modal"; +// A service that can act as a bridge between the front end Discourse application +// and the admin application. Use this if you need front end code to access admin +// modules. Inject it optionally, and if it exists go to town! export default Service.extend({ - init() { - this._super(...arguments); - - // TODO: Make `siteSettings` a service that can be injected - this.siteSettings = getOwner(this).lookup("site-settings:main"); - }, - showActionLogs(target, filters) { const controller = getOwner(target).lookup( "controller:adminLogs.staffActionLogs" diff --git a/app/assets/javascripts/discourse/app/lib/keyboard-shortcuts.js b/app/assets/javascripts/discourse/app/lib/keyboard-shortcuts.js index 93efb02416..f3bd4367c3 100644 --- a/app/assets/javascripts/discourse/app/lib/keyboard-shortcuts.js +++ b/app/assets/javascripts/discourse/app/lib/keyboard-shortcuts.js @@ -744,9 +744,7 @@ export default { }, categoriesTopicsList() { - const setting = this.container.lookup("site-settings:main") - .desktop_category_page_style; - switch (setting) { + switch (this.siteSettings.desktop_category_page_style) { case "categories_with_featured_topics": return $(".latest .featured-topic"); case "categories_and_latest_topics": From 135fdd59edca074ba7b5bc7244a587462020ec5a Mon Sep 17 00:00:00 2001 From: David Taylor Date: Thu, 18 Nov 2021 16:38:00 +0000 Subject: [PATCH 016/168] PERF: Improve JS app boot speed by optimizing `customResolve()` (#14990) Time spent in the 'find module with suffix' portion of our `customResolve` function were adding up to around 100ms-150ms when booting the app. This time is spread over 150+ calls, so it's not immediately obvious in flamegraphs. This commit implements a (reversed) [Trie](https://en.wikipedia.org/wiki/Trie) which enables fast suffix-based lookups on a list of strings. 
In my tests, this requires < 5ms to initialize, and brings the cumulative 'find module with suffix' time down to `< 5ms`. This corresponds to a ~100ms improvement in LCP metrics in my browser. The only behavior change is to remove support for module filenames which are **not** dasherized. I haven't found any core/theme/plugin modules which are not dasherized in their filenames. --- .../discourse-common/addon/lib/suffix-trie.js | 87 +++++++++++++++++++ .../discourse-common/addon/resolver.js | 22 +++-- .../tests/unit/lib/suffix-trie-test.js | 30 +++++++ 3 files changed, 132 insertions(+), 7 deletions(-) create mode 100644 app/assets/javascripts/discourse-common/addon/lib/suffix-trie.js create mode 100644 app/assets/javascripts/discourse/tests/unit/lib/suffix-trie-test.js diff --git a/app/assets/javascripts/discourse-common/addon/lib/suffix-trie.js b/app/assets/javascripts/discourse-common/addon/lib/suffix-trie.js new file mode 100644 index 0000000000..a82358e4f7 --- /dev/null +++ b/app/assets/javascripts/discourse-common/addon/lib/suffix-trie.js @@ -0,0 +1,87 @@ +class TrieNode { + constructor(name, parent) { + this.name = name; + this.parent = parent; + this.children = new Map(); + this.leafIndex = null; + } +} + +// Given a set of strings, this class can allow efficient lookups +// based on suffixes. +// +// By default, it will create one Trie node per character. If your data +// has known delimiters (e.g. / in file paths), you can pass a separator +// to the constructor for better performance. +// +// Matching results will be returned in insertion order +export default class SuffixTrie { + constructor(separator = "") { + this._trie = new TrieNode(); + this.separator = separator; + this._nextIndex = 0; + } + + add(value) { + const nodeNames = value.split(this.separator); + let currentNode = this._trie; + + // Iterate over the nodes backwards. The last one should be + // at the root of the tree + for (let i = nodeNames.length - 1; i >= 0; i--) { + let newNode = currentNode.children.get(nodeNames[i]); + if (!newNode) { + newNode = new TrieNode(nodeNames[i], currentNode); + currentNode.children.set(nodeNames[i], newNode); + } + currentNode = newNode; + } + + currentNode.leafIndex = this._nextIndex++; + } + + withSuffix(suffix, resultCount = null) { + const nodeNames = suffix.split(this.separator); + + // Traverse the tree to find the root node for this suffix + let node = this._trie; + for (let i = nodeNames.length - 1; i >= 0; i--) { + node = node.children.get(nodeNames[i]); + if (!node) { + return []; + } + } + + // Find all the leaves which are descendents of that node + const leaves = []; + const descendentNodes = [node]; + while (descendentNodes.length > 0) { + const thisDescendent = descendentNodes.pop(); + if (thisDescendent.leafIndex !== null) { + leaves.push(thisDescendent); + } + descendentNodes.push(...thisDescendent.children.values()); + } + + // Sort them in-place according to insertion order + leaves.sort((a, b) => (a.leafIndex < b.leafIndex ? 
-1 : 1)); + + // If a subset of results have been requested, truncate + if (resultCount !== null) { + leaves.splice(resultCount); + } + + // Calculate their full names, and return the joined string + return leaves.map((leafNode) => { + const parts = [leafNode.name]; + + let ancestorNode = leafNode; + while (typeof ancestorNode.parent?.name === "string") { + parts.push(ancestorNode.parent.name); + ancestorNode = ancestorNode.parent; + } + + return parts.join(this.separator); + }); + } +} diff --git a/app/assets/javascripts/discourse-common/addon/resolver.js b/app/assets/javascripts/discourse-common/addon/resolver.js index 057f173563..22d13c56ca 100644 --- a/app/assets/javascripts/discourse-common/addon/resolver.js +++ b/app/assets/javascripts/discourse-common/addon/resolver.js @@ -2,8 +2,10 @@ import { classify, dasherize } from "@ember/string"; import deprecated from "discourse-common/lib/deprecated"; import { findHelper } from "discourse-common/lib/helpers"; import { get } from "@ember/object"; +import SuffixTrie from "discourse-common/lib/suffix-trie"; let _options = {}; +let moduleSuffixTrie = null; export function setResolverOption(name, value) { _options[name] = value; @@ -34,6 +36,18 @@ function parseName(fullName) { }; } +function lookupModuleBySuffix(suffix) { + if (!moduleSuffixTrie) { + moduleSuffixTrie = new SuffixTrie("/"); + Object.keys(requirejs.entries).forEach((name) => { + if (!name.includes("/templates/")) { + moduleSuffixTrie.add(name); + } + }); + } + return moduleSuffixTrie.withSuffix(suffix, 1)[0]; +} + export function buildResolver(baseName) { return Ember.DefaultResolver.extend({ parseName, @@ -107,13 +121,7 @@ export function buildResolver(baseName) { // If we end with the name we want, use it. This allows us to define components within plugins. 
       const suffix = parsedName.type + "s/" + parsedName.fullNameWithoutType,
         dashed = dasherize(suffix),
-        moduleName = Object.keys(requirejs.entries).find(function (e) {
-          return (
-            e.indexOf("/templates/") === -1 &&
-            (e.indexOf(suffix, e.length - suffix.length) !== -1 ||
-              e.indexOf(dashed, e.length - dashed.length) !== -1)
-          );
-        });
+        moduleName = lookupModuleBySuffix(dashed);

       let module;
       if (moduleName) {
diff --git a/app/assets/javascripts/discourse/tests/unit/lib/suffix-trie-test.js b/app/assets/javascripts/discourse/tests/unit/lib/suffix-trie-test.js
new file mode 100644
index 0000000000..363c287db2
--- /dev/null
+++ b/app/assets/javascripts/discourse/tests/unit/lib/suffix-trie-test.js
@@ -0,0 +1,30 @@
+import { module, test } from "qunit";
+import SuffixTrie from "discourse-common/lib/suffix-trie";
+
+module("Unit | SuffixTrie", function () {
+  test("SuffixTrie", function (assert) {
+    const t = new SuffixTrie("/");
+    t.add("a/b/c/d");
+    t.add("b/a/c/d");
+    t.add("c/b/a/d");
+    t.add("d/c/b/a");
+
+    t.add("a/b/c/d/");
+    t.add("/a/b/c/d/");
+
+    // Simple lookups
+    assert.deepEqual(t.withSuffix("d"), ["a/b/c/d", "b/a/c/d", "c/b/a/d"]);
+    assert.deepEqual(t.withSuffix("c/d"), ["a/b/c/d", "b/a/c/d"]);
+    assert.deepEqual(t.withSuffix("b/c/d"), ["a/b/c/d"]);
+    assert.deepEqual(t.withSuffix("a/b/c/d"), ["a/b/c/d"]);
+    assert.deepEqual(t.withSuffix("b/a"), ["d/c/b/a"]);
+
+    // With leading/trailing delimiters
+    assert.deepEqual(t.withSuffix("c/d/"), ["a/b/c/d/", "/a/b/c/d/"]);
+    assert.deepEqual(t.withSuffix("/a/b/c/d/"), ["/a/b/c/d/"]);
+
+    // Limited lookups
+    assert.deepEqual(t.withSuffix("d", 1), ["a/b/c/d"]);
+    assert.deepEqual(t.withSuffix("d", 2), ["a/b/c/d", "b/a/c/d"]);
+  });
+});

From 510219a0daef50056420762f446aac4c88c90ba1 Mon Sep 17 00:00:00 2001
From: Daniel Waterworth
Date: Thu, 18 Nov 2021 11:36:34 -0600
Subject: [PATCH 017/168] DEV: Hide message bus site settings (#15009)

Admins don't need to be changing these.
---
 config/site_settings.yml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/config/site_settings.yml b/config/site_settings.yml
index 39835d5a88..a927de4c5f 100644
--- a/config/site_settings.yml
+++ b/config/site_settings.yml
@@ -1863,26 +1863,33 @@ developer:
     hidden: true
     default: ""
   enable_long_polling:
+    hidden: true
     client: true
     default: true
   enable_chunked_encoding:
+    hidden: true
     client: true
     default: true
   long_polling_interval:
+    hidden: true
     default: 25000
     max: 25000
   long_polling_base_url:
+    hidden: true
     client: true
     default: "/"
   background_polling_interval:
+    hidden: true
     client: true
     default: 60000
     max: 99000
   polling_interval:
+    hidden: true
     client: true
     default: 3000
     max: 99000
   anon_polling_interval:
+    hidden: true
     client: true
     default: 30000
     max: 99000

From ed2c3ebd7118955789897d0dc5b0d16d0c5e9235 Mon Sep 17 00:00:00 2001
From: David Taylor
Date: Thu, 18 Nov 2021 18:02:16 +0000
Subject: [PATCH 018/168] PERF: Move `preload` hints to the `<head>` (#15008)

We have two JS assets which are included in the `<body>` of responses. We were
including the `<link rel="preload">` hints so that the browser discovers them
earlier, and can start preloading the assets while the body is loading.
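As a rough way to confirm the effect (not part of this change; the fingerprinted asset URL will differ per build), the Resource Timing API in the browser console shows when the preloaded `start-discourse` asset started downloading relative to DOM parsing:

```js
// Browser-console sketch: check that the preloaded script was requested early.
// Assumes the asset URL contains "start-discourse", as in the template below.
const [nav] = performance.getEntriesByType("navigation");
const preload = performance
  .getEntriesByType("resource")
  .find((e) => e.initiatorType === "link" && e.name.includes("start-discourse"));

if (nav && preload) {
  console.log(
    `preload fetch started at ${Math.round(preload.startTime)}ms; ` +
      `DOMContentLoaded at ${Math.round(nav.domContentLoadedEventStart)}ms`
  );
}
```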
--- app/views/layouts/application.html.erb | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/app/views/layouts/application.html.erb b/app/views/layouts/application.html.erb index 23c2fbda66..53a4410a6c 100644 --- a/app/views/layouts/application.html.erb +++ b/app/views/layouts/application.html.erb @@ -21,6 +21,8 @@ <%= build_plugin_html 'server:before-script-load' %> + " as="script"> + " as="script"> <%= preload_script 'browser-detect' %> <%= preload_script "locales/#{I18n.locale}" %> @@ -113,11 +115,12 @@ <% end %> - <%= preload_script "start-discourse" %> + + <%= yield :data %> - <%= preload_script 'browser-update' %> + <%- unless customization_disabled? %> <%= theme_lookup("body_tag") %> From 6ae065f9cd101ff14c3fda77d41b05364cae80d6 Mon Sep 17 00:00:00 2001 From: Dan Ungureanu Date: Thu, 18 Nov 2021 20:19:02 +0200 Subject: [PATCH 019/168] Improved create invite modal (#14151) * FEATURE: Always show advanced invite options The UI is more simple and more efficient than how it was when the advanced options toggle was introduced. It does not make sense to keep it anymore. * UX: Minor copy edits * UX: Merge expire invite controls There were two controls in the create invite modal. One was a static text that displayed how much time is left until the invite expires. The other one was a datetime selector that set the time the invite expires. This commit merges the two controls in a single one: staff users will continue to see the datetime selector without the static text and regular users will only see the static text because they cannot set when the invite expires. * UX: Remove invite link It should only be visible after the invite was created. --- .../app/components/future-date-input.js | 52 ++++- .../app/controllers/create-invite.js | 78 ++++---- .../discourse/app/controllers/share-topic.js | 6 +- .../app/controllers/user-invited-show.js | 3 +- .../discourse/app/routes/group-index.js | 2 - .../app/templates/modal/create-invite.hbs | 182 +++++++++--------- .../acceptance/create-invite-modal-test.js | 51 +---- .../stylesheets/common/base/share_link.scss | 18 +- app/assets/stylesheets/desktop/modal.scss | 5 + config/locales/client.en.yml | 8 +- config/site_settings.yml | 1 + 11 files changed, 213 insertions(+), 193 deletions(-) diff --git a/app/assets/javascripts/discourse/app/components/future-date-input.js b/app/assets/javascripts/discourse/app/components/future-date-input.js index f673f73ae5..3f17a595ec 100644 --- a/app/assets/javascripts/discourse/app/components/future-date-input.js +++ b/app/assets/javascripts/discourse/app/components/future-date-input.js @@ -1,8 +1,10 @@ -import { and, empty, equal } from "@ember/object/computed"; -import { action } from "@ember/object"; import Component from "@ember/component"; -import { FORMAT } from "select-kit/components/future-date-input-selector"; +import { action } from "@ember/object"; +import { and, empty, equal } from "@ember/object/computed"; +import { CLOSE_STATUS_TYPE } from "discourse/controllers/edit-topic-timer"; +import buildTimeframes from "discourse/lib/timeframes-builder"; import I18n from "I18n"; +import { FORMAT } from "select-kit/components/future-date-input-selector"; export default Component.extend({ selection: null, @@ -20,12 +22,17 @@ export default Component.extend({ this._super(...arguments); if (this.input) { - const datetime = moment(this.input); - this.setProperties({ - selection: "pick_date_and_time", - _date: datetime.format("YYYY-MM-DD"), - _time: datetime.format("HH:mm"), - }); + const dateTime = 
moment(this.input); + const closestTimeframe = this.findClosestTimeframe(dateTime); + if (closestTimeframe) { + this.set("selection", closestTimeframe.id); + } else { + this.setProperties({ + selection: "pick_date_and_time", + _date: dateTime.format("YYYY-MM-DD"), + _time: dateTime.format("HH:mm"), + }); + } } }, @@ -64,4 +71,31 @@ export default Component.extend({ this.attrs.onChangeInput && this.attrs.onChangeInput(null); } }, + + findClosestTimeframe(dateTime) { + const now = moment(); + + const futureDateInputSelectorOptions = { + now, + day: now.day(), + includeWeekend: this.includeWeekend, + includeMidFuture: this.includeMidFuture || true, + includeFarFuture: this.includeFarFuture, + includeDateTime: this.includeDateTime, + canScheduleNow: this.includeNow || false, + canScheduleToday: 24 - now.hour() > 6, + }; + + return buildTimeframes(futureDateInputSelectorOptions).find((tf) => { + const tfDateTime = tf.when( + moment(), + this.statusType !== CLOSE_STATUS_TYPE ? 8 : 18 + ); + + if (tfDateTime) { + const diff = tfDateTime.diff(dateTime); + return 0 <= diff && diff < 60 * 1000; + } + }); + }, }); diff --git a/app/assets/javascripts/discourse/app/controllers/create-invite.js b/app/assets/javascripts/discourse/app/controllers/create-invite.js index 1a77404237..2a4cb7e841 100644 --- a/app/assets/javascripts/discourse/app/controllers/create-invite.js +++ b/app/assets/javascripts/discourse/app/controllers/create-invite.js @@ -9,6 +9,7 @@ import ModalFunctionality from "discourse/mixins/modal-functionality"; import Group from "discourse/models/group"; import Invite from "discourse/models/invite"; import I18n from "I18n"; +import { FORMAT } from "select-kit/components/future-date-input-selector"; export default Controller.extend( ModalFunctionality, @@ -16,13 +17,16 @@ export default Controller.extend( { allGroups: null, + flashText: null, + flashClass: null, + flashLink: false, + invite: null, invites: null, - showAdvanced: false, + editing: false, inviteToTopic: false, limitToEmail: false, - autogenerated: false, isLink: empty("buffered.email"), isEmail: notEmpty("buffered.email"), @@ -33,37 +37,33 @@ export default Controller.extend( }); this.setProperties({ + flashText: null, + flashClass: null, + flashLink: false, invite: null, invites: null, - showAdvanced: false, + editing: false, inviteToTopic: false, limitToEmail: false, - autogenerated: false, }); this.setInvite(Invite.create()); + this.buffered.setProperties({ + max_redemptions_allowed: 1, + expires_at: moment() + .add(this.siteSettings.invite_expiry_days, "days") + .format(FORMAT), + }); }, onClose() { - if (this.autogenerated) { - this.invite - .destroy() - .then(() => this.invites && this.invites.removeObject(this.invite)); - } + this.appEvents.trigger("modal-body:clearFlash"); }, setInvite(invite) { this.set("invite", invite); }, - setAutogenerated(value) { - if (this.invites && (this.autogenerated || !this.invite.id) && !value) { - this.invites.unshiftObject(this.invite); - } - - this.set("autogenerated", value); - }, - save(opts) { const data = { ...this.buffered.buffer }; @@ -101,29 +101,37 @@ export default Controller.extend( .save(data) .then((result) => { this.rollbackBuffer(); - this.setAutogenerated(opts.autogenerated); + + if ( + this.invites && + !this.invites.any((i) => i.id === this.invite.id) + ) { + this.invites.unshiftObject(this.invite); + } + if (result.warnings) { - this.appEvents.trigger("modal-body:flash", { - text: result.warnings.join(","), - messageClass: "warning", + this.setProperties({ + flashText: 
result.warnings.join(","), + flashClass: "warning", + flashLink: !this.editing, }); - } else if (!this.autogenerated) { + } else { if (this.isEmail && opts.sendEmail) { this.send("closeModal"); } else { - this.appEvents.trigger("modal-body:flash", { - text: opts.copy - ? I18n.t("user.invited.invite.invite_copied") - : I18n.t("user.invited.invite.invite_saved"), - messageClass: "success", + this.setProperties({ + flashText: I18n.t("user.invited.invite.invite_saved"), + flashClass: "success", + flashLink: !this.editing, }); } } }) .catch((e) => - this.appEvents.trigger("modal-body:flash", { - text: extractError(e), - messageClass: "error", + this.setProperties({ + flashText: extractError(e), + flashClass: "error", + flashLink: false, }) ); }, @@ -155,11 +163,6 @@ export default Controller.extend( return staff || groups.any((g) => g.owner); }, - @discourseComputed("currentUser.staff", "isEmail", "canInviteToGroup") - hasAdvanced(staff, isEmail, canInviteToGroup) { - return staff || isEmail || canInviteToGroup; - }, - @action copied() { this.save({ sendEmail: false, copy: true }); @@ -178,10 +181,5 @@ export default Controller.extend( this.set("buffered.email", result[0].email[0]); }); }, - - @action - toggleAdvanced() { - this.toggleProperty("showAdvanced"); - }, } ); diff --git a/app/assets/javascripts/discourse/app/controllers/share-topic.js b/app/assets/javascripts/discourse/app/controllers/share-topic.js index d1c3657be0..e3cf0ed5c9 100644 --- a/app/assets/javascripts/discourse/app/controllers/share-topic.js +++ b/app/assets/javascripts/discourse/app/controllers/share-topic.js @@ -128,15 +128,11 @@ export default Controller.extend( inviteUsers() { this.set("showNotifyUsers", false); const controller = showModal("create-invite"); - controller.setProperties({ - showAdvanced: true, - inviteToTopic: true, - }); + controller.set("inviteToTopic", true); controller.buffered.setProperties({ topicId: this.topic.id, topicTitle: this.topic.title, }); - controller.save({ autogenerated: true }); }, } ); diff --git a/app/assets/javascripts/discourse/app/controllers/user-invited-show.js b/app/assets/javascripts/discourse/app/controllers/user-invited-show.js index f7f98519a9..50c36460c1 100644 --- a/app/assets/javascripts/discourse/app/controllers/user-invited-show.js +++ b/app/assets/javascripts/discourse/app/controllers/user-invited-show.js @@ -66,7 +66,6 @@ export default Controller.extend({ createInvite() { const controller = showModal("create-invite"); controller.set("invites", this.model.invites); - controller.save({ autogenerated: true }); }, @action @@ -77,7 +76,7 @@ export default Controller.extend({ @action editInvite(invite) { const controller = showModal("create-invite"); - controller.set("showAdvanced", true); + controller.set("editing", true); controller.setInvite(invite); }, diff --git a/app/assets/javascripts/discourse/app/routes/group-index.js b/app/assets/javascripts/discourse/app/routes/group-index.js index 1a9ff92a92..c128678090 100644 --- a/app/assets/javascripts/discourse/app/routes/group-index.js +++ b/app/assets/javascripts/discourse/app/routes/group-index.js @@ -32,9 +32,7 @@ export default DiscourseRoute.extend({ showInviteModal() { const model = this.modelFor("group"); const controller = showModal("create-invite"); - controller.set("showAdvanced", true); controller.buffered.set("groupIds", [model.id]); - controller.save({ autogenerated: true }); }, @action diff --git a/app/assets/javascripts/discourse/app/templates/modal/create-invite.hbs 
b/app/assets/javascripts/discourse/app/templates/modal/create-invite.hbs index 6754afe8d7..d31844c484 100644 --- a/app/assets/javascripts/discourse/app/templates/modal/create-invite.hbs +++ b/app/assets/javascripts/discourse/app/templates/modal/create-invite.hbs @@ -1,21 +1,40 @@ -{{#d-modal-body title=(if invite.id "user.invited.invite.edit_title" "user.invited.invite.new_title")}} -
-
diff --git a/app/assets/javascripts/discourse/tests/acceptance/create-invite-modal-test.js b/app/assets/javascripts/discourse/tests/acceptance/create-invite-modal-test.js index 27d6abfe89..f4e577baab 100644 --- a/app/assets/javascripts/discourse/tests/acceptance/create-invite-modal-test.js +++ b/app/assets/javascripts/discourse/tests/acceptance/create-invite-modal-test.js @@ -4,15 +4,12 @@ import { count, exists, fakeTime, - query, queryAll, } from "discourse/tests/helpers/qunit-helpers"; -import { test } from "qunit"; import I18n from "I18n"; +import { test } from "qunit"; acceptance("Invites - Create & Edit Invite Modal", function (needs) { - let deleted; - needs.user(); needs.pretender((server, helper) => { const inviteData = { @@ -42,30 +39,17 @@ acceptance("Invites - Create & Edit Invite Modal", function (needs) { }); server.delete("/invites", () => { - deleted = true; return helper.response({}); }); }); - needs.hooks.beforeEach(() => { - deleted = false; - }); test("basic functionality", async function (assert) { await visit("/u/eviltrout/invited/pending"); await click(".user-invite-buttons .btn:first-child"); - assert.strictEqual( - query("input.invite-link").value, - "http://example.com/invites/52641ae8878790bc7b79916247cfe6ba", - "shows an invite link when modal is opened" - ); - await click(".modal-footer .show-advanced"); - await assert.ok(exists(".invite-to-groups"), "shows advanced options"); - await assert.ok(exists(".invite-to-topic"), "shows advanced options"); - await assert.ok(exists(".invite-expires-at"), "shows advanced options"); - - await click(".modal-close"); - assert.ok(deleted, "deletes the invite if not saved"); + await assert.ok(exists(".invite-to-groups")); + await assert.ok(exists(".invite-to-topic")); + await assert.ok(exists(".invite-expires-at")); }); test("saving", async function (assert) { @@ -81,31 +65,14 @@ acceptance("Invites - Create & Edit Invite Modal", function (needs) { 1, "adds invite to list after saving" ); - - await click(".modal-close"); - assert.notOk(deleted, "does not delete invite on close"); }); test("copying saves invite", async function (assert) { await visit("/u/eviltrout/invited/pending"); await click(".user-invite-buttons .btn:first-child"); - await click(".invite-link .btn"); - - await click(".modal-close"); - assert.notOk(deleted, "does not delete invite on close"); - }); - - test("copying an email invite without an email shows error message", async function (assert) { - await visit("/u/eviltrout/invited/pending"); - await click(".user-invite-buttons .btn:first-child"); - - await fillIn("#invite-email", "error"); - await click(".invite-link .btn"); - assert.strictEqual( - query("#modal-alert").innerText, - "error isn't a valid email address." 
- ); + await click(".save-invite"); + assert.ok(exists(".invite-link .btn")); }); }); @@ -159,7 +126,10 @@ acceptance("Invites - Email Invites", function (needs) { groups: [], }; - server.post("/invites", () => helper.response(inviteData)); + server.post("/invites", (request) => { + lastRequest = request; + return helper.response(inviteData); + }); server.put("/invites/1", (request) => { lastRequest = request; @@ -232,7 +202,6 @@ acceptance( await visit("/u/eviltrout/invited/pending"); await click(".user-invite-buttons .btn:first-child"); - await click(".modal-footer .show-advanced"); await click(".future-date-input-selector-header"); const options = Array.from( diff --git a/app/assets/stylesheets/common/base/share_link.scss b/app/assets/stylesheets/common/base/share_link.scss index 14cc953260..396d2c8a1c 100644 --- a/app/assets/stylesheets/common/base/share_link.scss +++ b/app/assets/stylesheets/common/base/share_link.scss @@ -182,9 +182,10 @@ } } - .show-advanced { - margin-left: auto; - margin-right: 0; + .invite-custom-message { + label { + margin-left: 1.75em; + } } .input-group { @@ -198,5 +199,16 @@ margin-left: 1.75em; width: calc(100% - 1.75em); } + + .future-date-input-date-picker, + .future-date-input-time-picker { + display: inline-block; + margin: 0em 0em 0em 1.75em; + width: calc(50% - 2em); + + input { + height: 34px; + } + } } } diff --git a/app/assets/stylesheets/desktop/modal.scss b/app/assets/stylesheets/desktop/modal.scss index 3c7368df84..c4a54245d2 100644 --- a/app/assets/stylesheets/desktop/modal.scss +++ b/app/assets/stylesheets/desktop/modal.scss @@ -111,6 +111,11 @@ .create-invite-modal, .create-invite-bulk-modal, .share-topic-modal { + &.modal .modal-body { + margin: 1em; + padding: unset; + } + .modal-inner-container { width: 40em; // scale with user font-size max-width: 100vw; // prevent overflow if user font-size is enourmous diff --git a/config/locales/client.en.yml b/config/locales/client.en.yml index bb9066166b..7fe17831b6 100644 --- a/config/locales/client.en.yml +++ b/config/locales/client.en.yml @@ -1613,7 +1613,7 @@ en: new_title: "Create Invite" edit_title: "Edit Invite" - instructions: "Share this link to instantly grant access to this site" + instructions: "Share this link to instantly grant access to this site:" copy_link: "copy link" expires_in_time: "Expires in %{time}" expired_at_time: "Expired at %{time}" @@ -1621,20 +1621,20 @@ en: show_advanced: "Show Advanced Options" hide_advanced: "Hide Advanced Options" - restrict_email: "Restrict to one email address" + restrict_email: "Restrict to email" max_redemptions_allowed: "Max uses" add_to_groups: "Add to groups" - invite_to_topic: "Arrive at this topic" + invite_to_topic: "Arrive at topic" expires_at: "Expire after" custom_message: "Optional personal message" send_invite_email: "Save and Send Email" + send_invite_email_instructions: "Restrict invite to email to send an invite email" save_invite: "Save Invite" invite_saved: "Invite saved." - invite_copied: "Invite link copied." bulk_invite: none: "No invitations to display on this page." 
diff --git a/config/site_settings.yml b/config/site_settings.yml index a927de4c5f..a1bad6eab0 100644 --- a/config/site_settings.yml +++ b/config/site_settings.yml @@ -586,6 +586,7 @@ users: default: true invite_expiry_days: default: 30 + client: true max: 36500 invites_per_page: client: true From 9ebfcbb8670aaa28f27609a55d2bff556697b061 Mon Sep 17 00:00:00 2001 From: Jordan Vidrine <30537603+jordanvidrine@users.noreply.github.com> Date: Thu, 18 Nov 2021 13:32:44 -0600 Subject: [PATCH 020/168] FIX: Topic list separator fix (#15011) --- app/assets/stylesheets/common/base/_topic-list.scss | 2 +- app/assets/stylesheets/desktop/topic-list.scss | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app/assets/stylesheets/common/base/_topic-list.scss b/app/assets/stylesheets/common/base/_topic-list.scss index eb2d7036e0..7480ccf030 100644 --- a/app/assets/stylesheets/common/base/_topic-list.scss +++ b/app/assets/stylesheets/common/base/_topic-list.scss @@ -199,7 +199,7 @@ border: none; .topic-list-data { - border-bottom: 1px solid var(--danger-medium); + border-top: 1px solid var(--danger-medium); line-height: 0em; padding: 0; text-align: center; diff --git a/app/assets/stylesheets/desktop/topic-list.scss b/app/assets/stylesheets/desktop/topic-list.scss index 78346b9e42..ad93fb44a9 100644 --- a/app/assets/stylesheets/desktop/topic-list.scss +++ b/app/assets/stylesheets/desktop/topic-list.scss @@ -211,7 +211,7 @@ button.dismiss-read { .topic-list { // tighter table header spacing - .topic-list-data:first-of-type { + .topic-list-item .topic-list-data:first-of-type { padding: 12px 5px; } // smaller table cell spacing From 2c045c636832e6fd1b5b42fa84d1f295fbdc698a Mon Sep 17 00:00:00 2001 From: Robin Ward Date: Wed, 17 Nov 2021 15:03:15 -0500 Subject: [PATCH 021/168] FIX: Don't overwrite computed property for loading spinner fix This fixes an issue CvX found on PR #14666 where a previous fix overwrote a computed property. The better fix (as is often the case with Ember) is to remove an observer and call methods when things change ourselves. 
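A condensed sketch of the shape this ends up with (simplified from the diff below; the real controller injects the `application` controller and carries more state):

```js
import Controller from "@ember/controller";

// Instead of observing `loadedAllItems`, the discovery controller exposes
// explicit hooks that the route calls when loading starts and stops.
export default Controller.extend({
  loading: false,

  loadingBegan() {
    this.set("loading", true);
    this.set("application.showFooter", false);
  },

  loadingComplete() {
    this.set("loading", false);
    this.set("application.showFooter", this.loadedAllItems);
  },
});
```

The route's `loading` and `loadingComplete` actions then call these methods instead of setting `discovery.loading` directly.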
--- .../discourse/app/controllers/discovery.js | 11 +++++++---- .../discourse/app/controllers/discovery/topics.js | 5 ++--- .../javascripts/discourse/app/routes/discovery.js | 9 ++++----- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/app/assets/javascripts/discourse/app/controllers/discovery.js b/app/assets/javascripts/discourse/app/controllers/discovery.js index 354465da4e..c06ad44e2b 100644 --- a/app/assets/javascripts/discourse/app/controllers/discovery.js +++ b/app/assets/javascripts/discourse/app/controllers/discovery.js @@ -2,7 +2,6 @@ import Controller, { inject as controller } from "@ember/controller"; import { alias, equal, not } from "@ember/object/computed"; import Category from "discourse/models/category"; import DiscourseURL from "discourse/lib/url"; -import { observes } from "discourse-common/utils/decorators"; import { inject as service } from "@ember/service"; export default Controller.extend({ @@ -14,7 +13,6 @@ export default Controller.extend({ "router.currentRouteName", "discovery.categories" ), - loading: false, category: alias("navigationCategory.category"), @@ -22,8 +20,13 @@ export default Controller.extend({ loadedAllItems: not("discoveryTopics.model.canLoadMore"), - @observes("loadedAllItems") - _showFooter() { + loadingBegan() { + this.set("loading", true); + this.set("application.showFooter", false); + }, + + loadingComplete() { + this.set("loading", false); this.set("application.showFooter", this.loadedAllItems); }, diff --git a/app/assets/javascripts/discourse/app/controllers/discovery/topics.js b/app/assets/javascripts/discourse/app/controllers/discovery/topics.js index b7aefc9587..f19fbdcf22 100644 --- a/app/assets/javascripts/discourse/app/controllers/discovery/topics.js +++ b/app/assets/javascripts/discourse/app/controllers/discovery/topics.js @@ -66,15 +66,14 @@ const controllerOpts = { this.send("resetParams", options.skipResettingParams); // Don't refresh if we're still loading - if (this.get("discovery.loading")) { + if (this.discovery.loading) { return; } // If we `send('loading')` here, due to returning true it bubbles up to the // router and ember throws an error due to missing `handlerInfos`. // Lesson learned: Don't call `loading` yourself. 
- this.set("discovery.loading", true); - this.set("model.canLoadMore", true); + this.discovery.loadingBegan(); this.topicTrackingState.resetTracking(); diff --git a/app/assets/javascripts/discourse/app/routes/discovery.js b/app/assets/javascripts/discourse/app/routes/discovery.js index a0d2bcc848..c8ab3ad57f 100644 --- a/app/assets/javascripts/discourse/app/routes/discovery.js +++ b/app/assets/javascripts/discourse/app/routes/discovery.js @@ -46,25 +46,24 @@ export default DiscourseRoute.extend(OpenComposer, { actions: { loading() { - this.controllerFor("discovery").set("loading", true); + this.controllerFor("discovery").loadingBegan(); + + // We don't want loading to bubble return true; }, loadingComplete() { - this.controllerFor("discovery").set("loading", false); + this.controllerFor("discovery").loadingComplete(); if (!this.session.get("topicListScrollPosition")) { scrollTop(); } - return false; }, didTransition() { - this.controllerFor("discovery")._showFooter(); this.send("loadingComplete"); const model = this.controllerFor("discovery/topics").get("model"); setTopicList(model); - return false; }, // clear a pinned topic From e6670393df5fb84eebf72deaa9eea07bb8ca6fcc Mon Sep 17 00:00:00 2001 From: Jarek Radosz Date: Thu, 18 Nov 2021 21:49:58 +0100 Subject: [PATCH 022/168] DEV: Refactor `logs-notice`/`global-notice` (#15000) --- .../discourse/app/components/global-notice.js | 280 +++++++++--------- .../inject-discourse-objects.js | 2 +- .../discourse/app/services/logs-notice.js | 37 +-- 3 files changed, 156 insertions(+), 163 deletions(-) diff --git a/app/assets/javascripts/discourse/app/components/global-notice.js b/app/assets/javascripts/discourse/app/components/global-notice.js index 82936c7a59..c526bebef2 100644 --- a/app/assets/javascripts/discourse/app/components/global-notice.js +++ b/app/assets/javascripts/discourse/app/components/global-notice.js @@ -1,11 +1,11 @@ -import EmberObject, { computed } from "@ember/object"; +import EmberObject, { action } from "@ember/object"; import cookie, { removeCookie } from "discourse/lib/cookie"; import Component from "@ember/component"; import I18n from "I18n"; -import LogsNotice from "discourse/services/logs-notice"; -import { bind } from "discourse-common/utils/decorators"; +import discourseComputed, { bind } from "discourse-common/utils/decorators"; import getURL from "discourse-common/lib/get-url"; import { htmlSafe } from "@ember/template"; +import { inject as service } from "@ember/service"; const _pluginNotices = []; @@ -48,186 +48,190 @@ const Notice = EmberObject.extend({ }); export default Component.extend({ + logsNoticeService: service("logsNotice"), logNotice: null, init() { this._super(...arguments); - this._setupObservers(); + this.logsNoticeService.addObserver("hidden", this._handleLogsNoticeUpdate); + this.logsNoticeService.addObserver("text", this._handleLogsNoticeUpdate); }, willDestroyElement() { this._super(...arguments); - this._tearDownObservers(); + this.logsNoticeService.removeObserver("text", this._handleLogsNoticeUpdate); + this.logsNoticeService.removeObserver( + "hidden", + this._handleLogsNoticeUpdate + ); }, - notices: computed( + @discourseComputed( "site.isReadOnly", + "site.wizard_required", + "siteSettings.login_required", "siteSettings.disable_emails", - "logNotice.{id,text,hidden}", - function () { - let notices = []; + "siteSettings.global_notice", + "siteSettings.bootstrap_mode_enabled", + "siteSettings.bootstrap_mode_min_users", + "session.safe_mode", + "logNotice.{id,text,hidden}" + ) + notices( + 
isReadOnly, + wizardRequired, + loginRequired, + disableEmails, + globalNotice, + bootstrapModeEnabled, + bootstrapModeMinUsers, + safeMode, + logNotice + ) { + let notices = []; - if (cookie("dosp") === "1") { - removeCookie("dosp", { path: "/" }); + if (cookie("dosp") === "1") { + removeCookie("dosp", { path: "/" }); + notices.push( + Notice.create({ + text: loginRequired + ? I18n.t("forced_anonymous_login_required") + : I18n.t("forced_anonymous"), + id: "forced-anonymous", + }) + ); + } + + if (safeMode) { + notices.push( + Notice.create({ text: I18n.t("safe_mode.enabled"), id: "safe-mode" }) + ); + } + + if (isReadOnly) { + notices.push( + Notice.create({ + text: I18n.t("read_only_mode.enabled"), + id: "alert-read-only", + }) + ); + } + + if (disableEmails === "yes" || disableEmails === "non-staff") { + notices.push( + Notice.create({ + text: I18n.t("emails_are_disabled"), + id: "alert-emails-disabled", + }) + ); + } + + if (wizardRequired) { + const requiredText = I18n.t("wizard_required", { + url: getURL("/wizard"), + }); + notices.push( + Notice.create({ text: htmlSafe(requiredText), id: "alert-wizard" }) + ); + } + + if (this.currentUser?.staff && bootstrapModeEnabled) { + if (bootstrapModeMinUsers > 0) { notices.push( Notice.create({ - text: this.siteSettings.login_required - ? I18n.t("forced_anonymous_login_required") - : I18n.t("forced_anonymous"), - id: "forced-anonymous", + text: I18n.t("bootstrap_mode_enabled", { + count: bootstrapModeMinUsers, + }), + id: "alert-bootstrap-mode", + }) + ); + } else { + notices.push( + Notice.create({ + text: I18n.t("bootstrap_mode_disabled"), + id: "alert-bootstrap-mode", }) ); } + } - if (this.session && this.session.safe_mode) { - notices.push( - Notice.create({ text: I18n.t("safe_mode.enabled"), id: "safe-mode" }) - ); + if (globalNotice?.length > 0) { + notices.push( + Notice.create({ + text: globalNotice, + id: "alert-global-notice", + }) + ); + } + + if (logNotice) { + notices.push(logNotice); + } + + return notices.concat(_pluginNotices).filter((notice) => { + if (notice.options.visibility) { + return notice.options.visibility(notice); } - if (this.site.isReadOnly) { - notices.push( - Notice.create({ - text: I18n.t("read_only_mode.enabled"), - id: "alert-read-only", - }) - ); + const key = `${GLOBAL_NOTICE_DISMISSED_PROMPT_KEY}-${notice.id}`; + const value = this.keyValueStore.get(key); + + // banner has never been dismissed + if (!value) { + return true; } - if ( - this.siteSettings.disable_emails === "yes" || - this.siteSettings.disable_emails === "non-staff" - ) { - notices.push( - Notice.create({ - text: I18n.t("emails_are_disabled"), - id: "alert-emails-disabled", - }) - ); + // banner has no persistent dismiss and should always show on load + if (!notice.options.persistentDismiss) { + return true; } - if (this.site.wizard_required) { - const requiredText = I18n.t("wizard_required", { - url: getURL("/wizard"), - }); - notices.push( - Notice.create({ text: htmlSafe(requiredText), id: "alert-wizard" }) - ); + if (notice.options.dismissDuration) { + const resetAt = moment(value).add(notice.options.dismissDuration); + return moment().isAfter(resetAt); + } else { + return false; } + }); + }, - if ( - this.get("currentUser.staff") && - this.siteSettings.bootstrap_mode_enabled - ) { - if (this.siteSettings.bootstrap_mode_min_users > 0) { - notices.push( - Notice.create({ - text: I18n.t("bootstrap_mode_enabled", { - count: this.siteSettings.bootstrap_mode_min_users, - }), - id: "alert-bootstrap-mode", - }) - ); - } else { - notices.push( 
- Notice.create({ - text: I18n.t("bootstrap_mode_disabled"), - id: "alert-bootstrap-mode", - }) - ); - } - } + @action + dismissNotice(notice) { + if (notice.options.onDismiss) { + notice.options.onDismiss(notice); + } - if ( - this.siteSettings.global_notice && - this.siteSettings.global_notice.length - ) { - notices.push( - Notice.create({ - text: this.siteSettings.global_notice, - id: "alert-global-notice", - }) - ); - } - - if (this.logNotice) { - notices.push(this.logNotice); - } - - return notices.concat(_pluginNotices).filter((notice) => { - if (notice.options.visibility) { - return notice.options.visibility(notice); - } else { - const key = `${GLOBAL_NOTICE_DISMISSED_PROMPT_KEY}-${notice.id}`; - const value = this.keyValueStore.get(key); - - // banner has never been dismissed - if (!value) { - return true; - } - - // banner has no persistent dismiss and should always show on load - if (!notice.options.persistentDismiss) { - return true; - } - - if (notice.options.dismissDuration) { - const resetAt = moment(value).add(notice.options.dismissDuration); - return moment().isAfter(resetAt); - } else { - return false; - } - } + if (notice.options.persistentDismiss) { + this.keyValueStore.set({ + key: `${GLOBAL_NOTICE_DISMISSED_PROMPT_KEY}-${notice.id}`, + value: moment().toISOString(true), }); } - ), - actions: { - dismissNotice(notice) { - if (notice.options.onDismiss) { - notice.options.onDismiss(notice); - } - - if (notice.options.persistentDismiss) { - this.keyValueStore.set({ - key: `${GLOBAL_NOTICE_DISMISSED_PROMPT_KEY}-${notice.id}`, - value: moment().toISOString(true), - }); - } - - const alert = document.getElementById(`global-notice-${notice.id}`); - if (alert) { - alert.style.display = "none"; - } - }, - }, - - _setupObservers() { - LogsNotice.current().addObserver("hidden", this._handleLogsNoticeUpdate); - LogsNotice.current().addObserver("text", this._handleLogsNoticeUpdate); - }, - - _tearDownObservers() { - LogsNotice.current().removeObserver("text", this._handleLogsNoticeUpdate); - LogsNotice.current().removeObserver("hidden", this._handleLogsNoticeUpdate); + const alert = document.getElementById(`global-notice-${notice.id}`); + if (alert) { + alert.style.display = "none"; + } }, @bind _handleLogsNoticeUpdate() { const logNotice = Notice.create({ - text: htmlSafe(LogsNotice.currentProp("message")), + text: htmlSafe(this.logsNoticeService.message), id: "alert-logs-notice", options: { dismissable: true, persistentDismiss: false, visibility() { - return !LogsNotice.currentProp("hidden"); + return !this.logsNoticeService.hidden; }, onDismiss() { - LogsNotice.currentProp("hidden", true); - LogsNotice.currentProp("text", ""); + this.logsNoticeService.setProperties({ + hidden: true, + text: "", + }); }, }, }); diff --git a/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js b/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js index 2a92f321fb..a9702e2ce4 100644 --- a/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js +++ b/app/assets/javascripts/discourse/app/pre-initializers/inject-discourse-objects.js @@ -98,7 +98,6 @@ export default { app.inject(t, "store", "service:store"); app.inject(t, "site", "site:main"); app.inject(t, "searchService", "service:search"); - app.inject(t, "keyValueStore", "key-value-store:main"); }); ALL_TARGETS.concat("service").forEach((t) => { @@ -106,6 +105,7 @@ export default { app.inject(t, "messageBus", "message-bus:main"); app.inject(t, "siteSettings", 
"site-settings:main"); app.inject(t, "topicTrackingState", "topic-tracking-state:main"); + app.inject(t, "keyValueStore", "key-value-store:main"); }); if (currentUser) { diff --git a/app/assets/javascripts/discourse/app/services/logs-notice.js b/app/assets/javascripts/discourse/app/services/logs-notice.js index 6c77e7c6b8..2e3143a907 100644 --- a/app/assets/javascripts/discourse/app/services/logs-notice.js +++ b/app/assets/javascripts/discourse/app/services/logs-notice.js @@ -1,22 +1,26 @@ -import discourseComputed, { - observes, - on, -} from "discourse-common/utils/decorators"; -import EmberObject from "@ember/object"; +import discourseComputed, { observes } from "discourse-common/utils/decorators"; +import Service from "@ember/service"; import I18n from "I18n"; import { autoUpdatingRelativeAge } from "discourse/lib/formatter"; import getURL from "discourse-common/lib/get-url"; import { htmlSafe } from "@ember/template"; import { isEmpty } from "@ember/utils"; +import { readOnly } from "@ember/object/computed"; const LOGS_NOTICE_KEY = "logs-notice-text"; -const LogsNotice = EmberObject.extend({ +export default Service.extend({ text: "", - @on("init") - _setup() { - if (!this.isActivated) { + isAdmin: readOnly("currentUser.admin"), + + init() { + this._super(...arguments); + + if ( + this.siteSettings.alert_admins_if_errors_per_hour === 0 && + this.siteSettings.alert_admins_if_errors_per_minute === 0 + ) { return; } @@ -63,11 +67,6 @@ const LogsNotice = EmberObject.extend({ return htmlSafe(text); }, - @discourseComputed("currentUser") - isAdmin(currentUser) { - return currentUser && currentUser.admin; - }, - @discourseComputed("isEmpty", "isAdmin") hidden(thisIsEmpty, isAdmin) { return !isAdmin || thisIsEmpty; @@ -77,14 +76,4 @@ const LogsNotice = EmberObject.extend({ _updateKeyValueStore() { this.keyValueStore.setItem(LOGS_NOTICE_KEY, this.text); }, - - @discourseComputed( - "siteSettings.alert_admins_if_errors_per_hour", - "siteSettings.alert_admins_if_errors_per_minute" - ) - isActivated(errorsPerHour, errorsPerMinute) { - return errorsPerHour > 0 || errorsPerMinute > 0; - }, }); - -export default LogsNotice; From 9955f1774d45eedbffcc64b8ea0e260ea5130187 Mon Sep 17 00:00:00 2001 From: jbrw Date: Thu, 18 Nov 2021 19:24:15 -0500 Subject: [PATCH 023/168] UX: Move share topic warning to modal flash alert (#15012) --- .../discourse/app/controllers/share-topic.js | 44 +++++++++---------- .../app/templates/modal/share-topic.hbs | 6 --- .../tests/acceptance/share-topic-test.js | 2 +- .../stylesheets/common/base/share_link.scss | 5 --- config/locales/client.en.yml | 4 +- 5 files changed, 23 insertions(+), 38 deletions(-) diff --git a/app/assets/javascripts/discourse/app/controllers/share-topic.js b/app/assets/javascripts/discourse/app/controllers/share-topic.js index e3cf0ed5c9..3c54976d9b 100644 --- a/app/assets/javascripts/discourse/app/controllers/share-topic.js +++ b/app/assets/javascripts/discourse/app/controllers/share-topic.js @@ -22,15 +22,7 @@ export default Controller.extend( this.set("showNotifyUsers", false); if (this.model && this.model.read_restricted) { - Category.reloadBySlugPath(this.model.slug).then((result) => { - this.setProperties({ - restrictedGroups: result.category.group_permissions.map( - (g) => g.group_name - ), - }); - }); - } else { - this.setProperties({ restrictedGroups: null }); + this.restrictedGroupWarning(); } }, @@ -55,21 +47,6 @@ export default Controller.extend( ); }, - @discourseComputed("restrictedGroups") - hasRestrictedGroups(groups) { - return !!groups; 
- }, - - @discourseComputed("restrictedGroups") - restrictedGroupsCount(groups) { - return groups.length; - }, - - @discourseComputed("restrictedGroups") - restrictedGroupsDisplayText(groups) { - return groups.join(", "); - }, - @action onChangeUsers(usernames) { this.set("users", usernames.uniq()); @@ -134,5 +111,24 @@ export default Controller.extend( topicTitle: this.topic.title, }); }, + + restrictedGroupWarning() { + this.appEvents.on("modal:body-shown", () => { + let restrictedGroups; + Category.reloadBySlugPath(this.model.slug).then((result) => { + restrictedGroups = result.category.group_permissions.map( + (g) => g.group_name + ); + + if (restrictedGroups) { + const message = I18n.t("topic.share.restricted_groups", { + count: restrictedGroups.length, + groupNames: restrictedGroups.join(", "), + }); + this.flash(message, "warning"); + } + }); + }); + }, } ); diff --git a/app/assets/javascripts/discourse/app/templates/modal/share-topic.hbs b/app/assets/javascripts/discourse/app/templates/modal/share-topic.hbs index ff2de50714..330637850b 100644 --- a/app/assets/javascripts/discourse/app/templates/modal/share-topic.hbs +++ b/app/assets/javascripts/discourse/app/templates/modal/share-topic.hbs @@ -13,12 +13,6 @@ - {{#if hasRestrictedGroups}} - - {{/if}}