diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml index 340143e9dc..dfb1901a1d 100644 --- a/.rubocop_todo.yml +++ b/.rubocop_todo.yml @@ -83,17 +83,12 @@ Rails/WhereExists: - 'app/lib/activitypub/activity/create.rb' - 'app/lib/delivery_failure_tracker.rb' - 'app/lib/feed_manager.rb' - - 'app/lib/status_cache_hydrator.rb' - 'app/lib/suspicious_sign_in_detector.rb' - - 'app/models/concerns/account/interactions.rb' - - 'app/models/featured_tag.rb' - 'app/models/poll.rb' - 'app/models/session_activation.rb' - 'app/models/status.rb' - - 'app/models/user.rb' - 'app/policies/status_policy.rb' - 'app/serializers/rest/announcement_serializer.rb' - - 'app/serializers/rest/tag_serializer.rb' - 'app/services/activitypub/fetch_remote_status_service.rb' - 'app/services/vote_service.rb' - 'app/validators/reaction_validator.rb' @@ -143,7 +138,6 @@ Style/FetchEnvVar: # AllowedMethods: redirect Style/FormatStringToken: Exclude: - - 'app/models/privacy_policy.rb' - 'config/initializers/devise.rb' - 'lib/paperclip/color_extractor.rb' diff --git a/Gemfile.lock b/Gemfile.lock index df4eaa5f9e..cba0ab1cb0 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -150,7 +150,7 @@ GEM erubi (~> 1.4) parser (>= 2.4) smart_properties - bigdecimal (3.1.5) + bigdecimal (3.1.6) bindata (2.4.15) binding_of_caller (1.0.0) debug_inspector (>= 0.0.1) @@ -398,12 +398,12 @@ GEM activerecord kaminari-core (= 1.2.2) kaminari-core (1.2.2) - kt-paperclip (7.2.1) + kt-paperclip (7.2.2) activemodel (>= 4.2.0) activesupport (>= 4.2.0) marcel (~> 1.0.1) mime-types - terrapin (~> 0.6.0) + terrapin (>= 0.6.0, < 2.0) language_server-protocol (3.17.0.3) launchy (2.5.2) addressable (~> 2.8) @@ -600,8 +600,8 @@ GEM rdf (3.3.1) bcp47_spec (~> 0.2) link_header (~> 0.0, >= 0.0.8) - rdf-normalize (0.6.1) - rdf (~> 3.2) + rdf-normalize (0.7.0) + rdf (~> 3.3) rdoc (6.6.2) psych (>= 4.0.0) redcarpet (3.6.0) diff --git a/app/controllers/admin/action_logs_controller.rb b/app/controllers/admin/action_logs_controller.rb index 37a00ad225..8b8e83fde7 100644 --- a/app/controllers/admin/action_logs_controller.rb +++ b/app/controllers/admin/action_logs_controller.rb @@ -6,7 +6,7 @@ module Admin def index authorize :audit_log, :index? - @auditable_accounts = Account.where(id: Admin::ActionLog.select('distinct account_id')).select(:id, :username) + @auditable_accounts = Account.auditable.select(:id, :username) end private diff --git a/app/controllers/api/v1/accounts/follower_accounts_controller.rb b/app/controllers/api/v1/accounts/follower_accounts_controller.rb index 21b1095f18..d6a5a7176d 100644 --- a/app/controllers/api/v1/accounts/follower_accounts_controller.rb +++ b/app/controllers/api/v1/accounts/follower_accounts_controller.rb @@ -21,7 +21,7 @@ class Api::V1::Accounts::FollowerAccountsController < Api::BaseController return [] if hide_results? scope = default_accounts - scope = scope.where.not(id: current_account.excluded_from_timeline_account_ids) unless current_account.nil? || current_account.id == @account.id + scope = scope.not_excluded_by_account(current_account) unless current_account.nil? 
|| current_account.id == @account.id scope.merge(paginated_follows).to_a end diff --git a/app/controllers/api/v1/accounts/following_accounts_controller.rb b/app/controllers/api/v1/accounts/following_accounts_controller.rb index 1db521f79c..b8578ef539 100644 --- a/app/controllers/api/v1/accounts/following_accounts_controller.rb +++ b/app/controllers/api/v1/accounts/following_accounts_controller.rb @@ -21,7 +21,7 @@ class Api::V1::Accounts::FollowingAccountsController < Api::BaseController return [] if hide_results? scope = default_accounts - scope = scope.where.not(id: current_account.excluded_from_timeline_account_ids) unless current_account.nil? || current_account.id == @account.id + scope = scope.not_excluded_by_account(current_account) unless current_account.nil? || current_account.id == @account.id scope.merge(paginated_follows).to_a end diff --git a/app/controllers/api/v1/peers/search_controller.rb b/app/controllers/api/v1/peers/search_controller.rb index 0c503d9bc5..1780554c5d 100644 --- a/app/controllers/api/v1/peers/search_controller.rb +++ b/app/controllers/api/v1/peers/search_controller.rb @@ -27,7 +27,7 @@ class Api::V1::Peers::SearchController < Api::BaseController @domains = InstancesIndex.query(function_score: { query: { prefix: { - domain: TagManager.instance.normalize_domain(params[:q].strip), + domain: normalized_domain, }, }, @@ -37,11 +37,18 @@ class Api::V1::Peers::SearchController < Api::BaseController }, }).limit(10).pluck(:domain) else - domain = params[:q].strip - domain = TagManager.instance.normalize_domain(domain) - @domains = Instance.searchable.where(Instance.arel_table[:domain].matches("#{Instance.sanitize_sql_like(domain)}%", false, true)).limit(10).pluck(:domain) + domain = normalized_domain + @domains = Instance.searchable.domain_starts_with(domain).limit(10).pluck(:domain) end rescue Addressable::URI::InvalidURIError @domains = [] end + + def normalized_domain + TagManager.instance.normalize_domain(query_value) + end + + def query_value + params[:q].strip + end end diff --git a/app/controllers/auth/sessions_controller.rb b/app/controllers/auth/sessions_controller.rb index 148ad53755..6bc48a7804 100644 --- a/app/controllers/auth/sessions_controller.rb +++ b/app/controllers/auth/sessions_controller.rb @@ -1,6 +1,10 @@ # frozen_string_literal: true class Auth::SessionsController < Devise::SessionsController + include Redisable + + MAX_2FA_ATTEMPTS_PER_HOUR = 10 + layout 'auth' skip_before_action :check_self_destruct! 
@@ -130,9 +134,23 @@ class Auth::SessionsController < Devise::SessionsController session.delete(:attempt_user_updated_at) end + def clear_2fa_attempt_from_user(user) + redis.del(second_factor_attempts_key(user)) + end + + def check_second_factor_rate_limits(user) + attempts, = redis.multi do |multi| + multi.incr(second_factor_attempts_key(user)) + multi.expire(second_factor_attempts_key(user), 1.hour) + end + + attempts >= MAX_2FA_ATTEMPTS_PER_HOUR + end + def on_authentication_success(user, security_measure) @on_authentication_success_called = true + clear_2fa_attempt_from_user(user) clear_attempt_from_session user.update_sign_in!(new_sign_in: true) @@ -164,4 +182,8 @@ class Auth::SessionsController < Devise::SessionsController user_agent: request.user_agent ) end + + def second_factor_attempts_key(user) + "2fa_auth_attempts:#{user.id}:#{Time.now.utc.hour}" + end end diff --git a/app/controllers/concerns/auth/two_factor_authentication_concern.rb b/app/controllers/concerns/auth/two_factor_authentication_concern.rb index effdb8d21c..404164751a 100644 --- a/app/controllers/concerns/auth/two_factor_authentication_concern.rb +++ b/app/controllers/concerns/auth/two_factor_authentication_concern.rb @@ -66,6 +66,11 @@ module Auth::TwoFactorAuthenticationConcern end def authenticate_with_two_factor_via_otp(user) + if check_second_factor_rate_limits(user) + flash.now[:alert] = I18n.t('users.rate_limited') + return prompt_for_two_factor(user) + end + if valid_otp_attempt?(user) on_authentication_success(user, :otp) else diff --git a/app/helpers/jsonld_helper.rb b/app/helpers/jsonld_helper.rb index 46cef6b087..4c00c72393 100644 --- a/app/helpers/jsonld_helper.rb +++ b/app/helpers/jsonld_helper.rb @@ -163,7 +163,7 @@ module JsonLdHelper end end - def fetch_resource(uri, id, on_behalf_of = nil) + def fetch_resource(uri, id, on_behalf_of = nil, request_options: {}) unless id json = fetch_resource_without_id_validation(uri, on_behalf_of) @@ -172,14 +172,14 @@ module JsonLdHelper uri = json['id'] end - json = fetch_resource_without_id_validation(uri, on_behalf_of) + json = fetch_resource_without_id_validation(uri, on_behalf_of, request_options: request_options) json.present? && json['id'] == uri ? 
json : nil end - def fetch_resource_without_id_validation(uri, on_behalf_of = nil, raise_on_temporary_error = false) + def fetch_resource_without_id_validation(uri, on_behalf_of = nil, raise_on_temporary_error = false, request_options: {}) on_behalf_of ||= Account.representative - build_request(uri, on_behalf_of).perform do |response| + build_request(uri, on_behalf_of, options: request_options).perform do |response| raise Mastodon::UnexpectedResponseError, response unless response_successful?(response) || response_error_unsalvageable?(response) || !raise_on_temporary_error body_to_json(response.body_with_limit) if response.code == 200 @@ -212,8 +212,8 @@ module JsonLdHelper response.code == 501 || ((400...500).cover?(response.code) && ![401, 408, 429].include?(response.code)) end - def build_request(uri, on_behalf_of = nil) - Request.new(:get, uri).tap do |request| + def build_request(uri, on_behalf_of = nil, options: {}) + Request.new(:get, uri, **options).tap do |request| request.on_behalf_of(on_behalf_of) if on_behalf_of request.add_headers('Accept' => 'application/activity+json, application/ld+json') end diff --git a/app/javascript/mastodon/actions/search.js b/app/javascript/mastodon/actions/search.js index 38a089b486..a34a490e76 100644 --- a/app/javascript/mastodon/actions/search.js +++ b/app/javascript/mastodon/actions/search.js @@ -179,6 +179,11 @@ export const openURL = (value, history, onFailure) => (dispatch, getState) => { export const clickSearchResult = (q, type) => (dispatch, getState) => { const previous = getState().getIn(['search', 'recent']); + + if (previous.some(x => x.get('q') === q && x.get('type') === type)) { + return; + } + const me = getState().getIn(['meta', 'me']); const current = previous.add(fromJS({ type, q })).takeLast(4); @@ -207,4 +212,4 @@ export const hydrateSearch = () => (dispatch, getState) => { if (history !== null) { dispatch(updateSearchHistory(history)); } -}; \ No newline at end of file +}; diff --git a/app/javascript/mastodon/features/compose/components/search.jsx b/app/javascript/mastodon/features/compose/components/search.jsx index a154984171..7c5c39f037 100644 --- a/app/javascript/mastodon/features/compose/components/search.jsx +++ b/app/javascript/mastodon/features/compose/components/search.jsx @@ -62,17 +62,17 @@ class Search extends PureComponent { }; defaultOptions = [ - { label: <>has: , action: e => { e.preventDefault(); this._insertText('has:'); } }, - { label: <>is: , action: e => { e.preventDefault(); this._insertText('is:'); } }, - { label: <>my: , action: e => { e.preventDefault(); this._insertText('my:'); } }, - { label: <>language: , action: e => { e.preventDefault(); this._insertText('language:'); } }, - { label: <>from: , action: e => { e.preventDefault(); this._insertText('from:'); } }, - { label: <>domain: , action: e => { e.preventDefault(); this._insertText('domain:'); } }, - { label: <>before: , action: e => { e.preventDefault(); this._insertText('before:'); } }, - { label: <>during: , action: e => { e.preventDefault(); this._insertText('during:'); } }, - { label: <>after: , action: e => { e.preventDefault(); this._insertText('after:'); } }, - { label: <>in: , action: e => { e.preventDefault(); this._insertText('in:'); } }, - { label: <>order: , action: e => { e.preventDefault(); this._insertText('order:'); } }, + { key: 'prompt-has', label: <>has: , action: e => { e.preventDefault(); this._insertText('has:'); } }, + { key: 'prompt-is', label: <>is: , action: e => { e.preventDefault(); this._insertText('is:'); } }, + { 
key: 'prompt-my', label: <>my: , action: e => { e.preventDefault(); this._insertText('my:'); } }, + { key: 'prompt-language', label: <>language: , action: e => { e.preventDefault(); this._insertText('language:'); } }, + { key: 'prompt-from', label: <>from: , action: e => { e.preventDefault(); this._insertText('from:'); } }, + { key: 'prompt-domain', label: <>domain: , action: e => { e.preventDefault(); this._insertText('domain:'); } }, + { key: 'prompt-before', label: <>before: , action: e => { e.preventDefault(); this._insertText('before:'); } }, + { key: 'prompt-during', label: <>during: , action: e => { e.preventDefault(); this._insertText('during:'); } }, + { key: 'prompt-after', label: <>after: , action: e => { e.preventDefault(); this._insertText('after:'); } }, + { key: 'prompt-in', label: <>in: , action: e => { e.preventDefault(); this._insertText('in:'); } }, + { key: 'prompt-order', label: <>order: , action: e => { e.preventDefault(); this._insertText('order:'); } }, ]; setRef = c => { @@ -265,6 +265,8 @@ class Search extends PureComponent { const { recent } = this.props; return recent.toArray().map(search => ({ + key: `${search.get('type')}/${search.get('q')}`, + label: labelForRecentSearch(search), action: () => this.handleRecentSearchClick(search), @@ -349,8 +351,8 @@ class Search extends PureComponent {

- {recent.size > 0 ? this._getOptions().map(({ label, action, forget }, i) => ( - diff --git a/app/javascript/mastodon/locales/vi.json b/app/javascript/mastodon/locales/vi.json index 9de043bb20..c623caa3fb 100644 --- a/app/javascript/mastodon/locales/vi.json +++ b/app/javascript/mastodon/locales/vi.json @@ -358,7 +358,7 @@ "keyboard_shortcuts.my_profile": "mở hồ sơ của bạn", "keyboard_shortcuts.notifications": "mở thông báo", "keyboard_shortcuts.open_media": "mở ảnh hoặc video", - "keyboard_shortcuts.pinned": "mở những tút đã ghim", + "keyboard_shortcuts.pinned": "Open pinned posts list", "keyboard_shortcuts.profile": "mở trang của người đăng tút", "keyboard_shortcuts.reply": "trả lời", "keyboard_shortcuts.requests": "mở danh sách yêu cầu theo dõi", diff --git a/app/javascript/mastodon/store/typed_functions.ts b/app/javascript/mastodon/store/typed_functions.ts index 46a10b8b47..4859b82651 100644 --- a/app/javascript/mastodon/store/typed_functions.ts +++ b/app/javascript/mastodon/store/typed_functions.ts @@ -1,12 +1,11 @@ import { createAsyncThunk } from '@reduxjs/toolkit'; -import type { TypedUseSelectorHook } from 'react-redux'; // eslint-disable-next-line @typescript-eslint/no-restricted-imports import { useDispatch, useSelector } from 'react-redux'; import type { AppDispatch, RootState } from './store'; -export const useAppDispatch: () => AppDispatch = useDispatch; -export const useAppSelector: TypedUseSelectorHook<RootState> = useSelector; +export const useAppDispatch = useDispatch.withTypes<AppDispatch>(); +export const useAppSelector = useSelector.withTypes<RootState>(); export const createAppAsyncThunk = createAsyncThunk.withTypes<{ state: RootState; diff --git a/app/javascript/styles/mailer.scss b/app/javascript/styles/mailer.scss index bd220bb1a8..a2cbb494b4 100644 --- a/app/javascript/styles/mailer.scss +++ b/app/javascript/styles/mailer.scss @@ -100,9 +100,8 @@ table + p { border-top-right-radius: 12px; height: 140px; vertical-align: bottom; - background-color: #f3f2f5; - background-position: center; - background-size: cover; + background-position: center !important; + background-size: cover !important; } .email-account-banner-inner-td { diff --git a/app/lib/status_cache_hydrator.rb b/app/lib/status_cache_hydrator.rb index 3387c0b68b..deead3717d 100644 --- a/app/lib/status_cache_hydrator.rb +++ b/app/lib/status_cache_hydrator.rb @@ -28,11 +28,11 @@ class StatusCacheHydrator def hydrate_non_reblog_payload(empty_payload, account_id, account) empty_payload.tap do |payload| - payload[:favourited] = Favourite.where(account_id: account_id, status_id: @status.id).exists? - payload[:reblogged] = Status.where(account_id: account_id, reblog_of_id: @status.id).exists? - payload[:muted] = ConversationMute.where(account_id: account_id, conversation_id: @status.conversation_id).exists? - payload[:bookmarked] = Bookmark.where(account_id: account_id, status_id: @status.id).exists? - payload[:pinned] = StatusPin.where(account_id: account_id, status_id: @status.id).exists?
if @status.account_id == account_id + payload[:favourited] = Favourite.exists?(account_id: account_id, status_id: @status.id) + payload[:reblogged] = Status.exists?(account_id: account_id, reblog_of_id: @status.id) + payload[:muted] = ConversationMute.exists?(account_id: account_id, conversation_id: @status.conversation_id) + payload[:bookmarked] = Bookmark.exists?(account_id: account_id, status_id: @status.id) + payload[:pinned] = StatusPin.exists?(account_id: account_id, status_id: @status.id) if @status.account_id == account_id payload[:filtered] = mapped_applied_custom_filter(account_id, @status) payload[:emoji_reactions] = @status.emoji_reactions_grouped_by_name(account) @@ -43,7 +43,7 @@ class StatusCacheHydrator end end - def hydrate_reblog_payload(empty_payload, account_id, account) # rubocop:disable Metrics/AbcSize + def hydrate_reblog_payload(empty_payload, account_id, account) empty_payload.tap do |payload| payload[:muted] = false payload[:bookmarked] = false @@ -54,11 +54,11 @@ class StatusCacheHydrator # used to create the status, we need to hydrate it here too payload[:reblog][:application] = payload_reblog_application if payload[:reblog][:application].nil? && @status.reblog.account_id == account_id - payload[:reblog][:favourited] = Favourite.where(account_id: account_id, status_id: @status.reblog_of_id).exists? - payload[:reblog][:reblogged] = Status.where(account_id: account_id, reblog_of_id: @status.reblog_of_id).exists? - payload[:reblog][:muted] = ConversationMute.where(account_id: account_id, conversation_id: @status.reblog.conversation_id).exists? - payload[:reblog][:bookmarked] = Bookmark.where(account_id: account_id, status_id: @status.reblog_of_id).exists? - payload[:reblog][:pinned] = StatusPin.where(account_id: account_id, status_id: @status.reblog_of_id).exists? 
if @status.reblog.account_id == account_id + payload[:reblog][:favourited] = Favourite.exists?(account_id: account_id, status_id: @status.reblog_of_id) + payload[:reblog][:reblogged] = Status.exists?(account_id: account_id, reblog_of_id: @status.reblog_of_id) + payload[:reblog][:muted] = ConversationMute.exists?(account_id: account_id, conversation_id: @status.reblog.conversation_id) + payload[:reblog][:bookmarked] = Bookmark.exists?(account_id: account_id, status_id: @status.reblog_of_id) + payload[:reblog][:pinned] = StatusPin.exists?(account_id: account_id, status_id: @status.reblog_of_id) if @status.reblog.account_id == account_id payload[:reblog][:filtered] = payload[:filtered] payload[:reblog][:emoji_reactions] = @status.reblog.emoji_reactions_grouped_by_name(account) diff --git a/app/models/account.rb b/app/models/account.rb index 71d1a55b02..096fd70810 100644 --- a/app/models/account.rb +++ b/app/models/account.rb @@ -130,10 +130,10 @@ class Account < ApplicationRecord scope :bots, -> { where(actor_type: %w(Application Service)) } scope :groups, -> { where(actor_type: 'Group') } scope :alphabetic, -> { order(domain: :asc, username: :asc) } - scope :matches_username, ->(value) { where('lower((username)::text) ~ lower(?)', value.to_s) } - scope :matches_display_name, ->(value) { where(arel_table[:display_name].matches_regexp(value.to_s)) } - scope :matches_domain, ->(value) { where(arel_table[:domain].matches("%#{value}%")) } + scope :matches_username, ->(value) { where('lower((username)::text) LIKE lower(?)', "#{value}%") } + scope :matches_display_name, ->(value) { where(arel_table[:display_name].matches("#{value}%")) } scope :without_unapproved, -> { left_outer_joins(:user).merge(User.approved.confirmed).or(remote) } + scope :auditable, -> { where(id: Admin::ActionLog.select(:account_id).distinct) } scope :searchable, -> { without_unapproved.without_suspended.where(moved_to_account_id: nil) } scope :discoverable, -> { searchable.without_silenced.where(discoverable: true).joins(:account_stat) } scope :by_recent_status, -> { includes(:account_stat).merge(AccountStat.order('last_status_at DESC NULLS LAST')).references(:account_stat) } diff --git a/app/models/admin/action_log_filter.rb b/app/models/admin/action_log_filter.rb index d413cb386d..f581af74e8 100644 --- a/app/models/admin/action_log_filter.rb +++ b/app/models/admin/action_log_filter.rb @@ -72,7 +72,7 @@ class Admin::ActionLogFilter end def results - scope = latest_action_logs.includes(:target) + scope = latest_action_logs.includes(:target, :account) params.each do |key, value| next if key.to_s == 'page' diff --git a/app/models/appeal.rb b/app/models/appeal.rb index f1290ad01a..395056b76f 100644 --- a/app/models/appeal.rb +++ b/app/models/appeal.rb @@ -20,8 +20,11 @@ class Appeal < ApplicationRecord belongs_to :account belongs_to :strike, class_name: 'AccountWarning', foreign_key: 'account_warning_id', inverse_of: :appeal - belongs_to :approved_by_account, class_name: 'Account', optional: true - belongs_to :rejected_by_account, class_name: 'Account', optional: true + + with_options class_name: 'Account', optional: true do + belongs_to :approved_by_account + belongs_to :rejected_by_account + end validates :text, presence: true, length: { maximum: 2_000 } validates :account_warning_id, uniqueness: true diff --git a/app/models/concerns/account/interactions.rb b/app/models/concerns/account/interactions.rb index ede0084b47..d5c232336a 100644 --- a/app/models/concerns/account/interactions.rb +++ 
b/app/models/concerns/account/interactions.rb @@ -183,7 +183,7 @@ module Account::Interactions end def following?(other_account) - active_relationships.where(target_account: other_account).exists? + active_relationships.exists?(target_account: other_account) end def following_anyone? @@ -212,63 +212,63 @@ module Account::Interactions end def blocking?(other_account) - block_relationships.where(target_account: other_account).exists? + block_relationships.exists?(target_account: other_account) end def domain_blocking?(other_domain) - domain_blocks.where(domain: other_domain).exists? + domain_blocks.exists?(domain: other_domain) end def muting?(other_account) - mute_relationships.where(target_account: other_account).exists? + mute_relationships.exists?(target_account: other_account) end def muting_conversation?(conversation) - conversation_mutes.where(conversation: conversation).exists? + conversation_mutes.exists?(conversation: conversation) end def muting_notifications?(other_account) - mute_relationships.where(target_account: other_account, hide_notifications: true).exists? + mute_relationships.exists?(target_account: other_account, hide_notifications: true) end def muting_reblogs?(other_account) - active_relationships.where(target_account: other_account, show_reblogs: false).exists? + active_relationships.exists?(target_account: other_account, show_reblogs: false) end def requested?(other_account) - follow_requests.where(target_account: other_account).exists? + follow_requests.exists?(target_account: other_account) end def favourited?(status) - status.proper.favourites.where(account: self).exists? + status.proper.favourites.exists?(account: self) end def emoji_reacted?(status, shortcode = nil, domain = nil, domain_force: false) if shortcode.present? if domain.present? || domain_force - status.proper.emoji_reactions.joins(:custom_emoji).where(account: self, name: shortcode, custom_emoji: { domain: domain }).exists? + status.proper.emoji_reactions.joins(:custom_emoji).exists?(account: self, name: shortcode, custom_emoji: { domain: domain }) else - status.proper.emoji_reactions.where(account: self, name: shortcode).exists? + status.proper.emoji_reactions.exists?(account: self, name: shortcode) end else - status.proper.emoji_reactions.where(account: self).exists? + status.proper.emoji_reactions.exists?(account: self) end end def bookmarked?(status) - status.proper.bookmarks.where(account: self).exists? + status.proper.bookmarks.exists?(account: self) end def reblogged?(status) - status.proper.reblogs.where(account: self).exists? + status.proper.reblogs.exists?(account: self) end def pinned?(status) - status_pins.where(status: status).exists? + status_pins.exists?(status: status) end def endorsed?(account) - account_pins.where(target_account: account).exists? 
+ account_pins.exists?(target_account: account) end def status_matches_filters(status) diff --git a/app/models/domain_allow.rb b/app/models/domain_allow.rb index ce9597b4d1..47ada7ac23 100644 --- a/app/models/domain_allow.rb +++ b/app/models/domain_allow.rb @@ -17,8 +17,6 @@ class DomainAllow < ApplicationRecord validates :domain, presence: true, uniqueness: true, domain: true - scope :matches_domain, ->(value) { where(arel_table[:domain].matches("%#{value}%")) } - def to_log_human_identifier domain end diff --git a/app/models/domain_block.rb b/app/models/domain_block.rb index 73c091e4aa..a1e6efe336 100644 --- a/app/models/domain_block.rb +++ b/app/models/domain_block.rb @@ -39,7 +39,6 @@ class DomainBlock < ApplicationRecord has_many :accounts, foreign_key: :domain, primary_key: :domain, inverse_of: false, dependent: nil delegate :count, to: :accounts, prefix: true - scope :matches_domain, ->(value) { where(arel_table[:domain].matches("%#{value}%")) } scope :with_user_facing_limitations, -> { where(hidden: false) } scope :with_limitations, lambda { where(severity: [:silence, :suspend]) diff --git a/app/models/email_domain_block.rb b/app/models/email_domain_block.rb index f1b14c8b08..40be59420a 100644 --- a/app/models/email_domain_block.rb +++ b/app/models/email_domain_block.rb @@ -21,8 +21,10 @@ class EmailDomainBlock < ApplicationRecord include DomainNormalizable include Paginable - belongs_to :parent, class_name: 'EmailDomainBlock', optional: true - has_many :children, class_name: 'EmailDomainBlock', foreign_key: :parent_id, inverse_of: :parent, dependent: :destroy + with_options class_name: 'EmailDomainBlock' do + belongs_to :parent, optional: true + has_many :children, foreign_key: :parent_id, inverse_of: :parent, dependent: :destroy + end validates :domain, presence: true, uniqueness: true, domain: true diff --git a/app/models/featured_tag.rb b/app/models/featured_tag.rb index 4ea3bf767d..b081099269 100644 --- a/app/models/featured_tag.rb +++ b/app/models/featured_tag.rb @@ -45,7 +45,7 @@ class FeaturedTag < ApplicationRecord end def decrement(deleted_status_id) - update(statuses_count: [0, statuses_count - 1].max, last_status_at: account.statuses.where(visibility: %i(public unlisted public_unlisted login)).tagged_with(tag).where.not(id: deleted_status_id).select(:created_at).first&.created_at) + update(statuses_count: [0, statuses_count - 1].max, last_status_at: visible_tagged_account_statuses.where.not(id: deleted_status_id).select(:created_at).first&.created_at) end private @@ -55,8 +55,8 @@ class FeaturedTag < ApplicationRecord end def reset_data - self.statuses_count = account.statuses.where(visibility: %i(public unlisted public_unlisted login)).tagged_with(tag).count - self.last_status_at = account.statuses.where(visibility: %i(public unlisted public_unlisted login)).tagged_with(tag).select(:created_at).first&.created_at + self.statuses_count = visible_tagged_account_statuses.count + self.last_status_at = visible_tagged_account_statuses.select(:created_at).first&.created_at end def validate_featured_tags_limit @@ -66,6 +66,14 @@ class FeaturedTag < ApplicationRecord end def validate_tag_uniqueness - errors.add(:name, :taken) if FeaturedTag.by_name(name).where(account_id: account_id).exists? + errors.add(:name, :taken) if tag_already_featured_for_account? + end + + def tag_already_featured_for_account? 
+ FeaturedTag.by_name(name).exists?(account_id: account_id) + end + + def visible_tagged_account_statuses + account.statuses.where(visibility: %i(public unlisted public_unlisted login)).tagged_with(tag) end end diff --git a/app/models/instance.rb b/app/models/instance.rb index 09e823bfbb..779727ce9f 100644 --- a/app/models/instance.rb +++ b/app/models/instance.rb @@ -25,6 +25,7 @@ class Instance < ApplicationRecord scope :searchable, -> { where.not(domain: DomainBlock.select(:domain)) } scope :matches_domain, ->(value) { where(arel_table[:domain].matches("%#{value}%")) } + scope :domain_starts_with, ->(value) { where(arel_table[:domain].matches("#{sanitize_sql_like(value)}%", false, true)) } scope :by_domain_and_subdomains, ->(domain) { where("reverse('.' || domain) LIKE reverse(?)", "%.#{domain}") } def self.refresh diff --git a/app/models/poll.rb b/app/models/poll.rb index 72f04f00a7..37149c3d86 100644 --- a/app/models/poll.rb +++ b/app/models/poll.rb @@ -27,8 +27,11 @@ class Poll < ApplicationRecord belongs_to :status has_many :votes, class_name: 'PollVote', inverse_of: :poll, dependent: :delete_all - has_many :voters, -> { group('accounts.id') }, through: :votes, class_name: 'Account', source: :account - has_many :local_voters, -> { group('accounts.id').merge(Account.local) }, through: :votes, class_name: 'Account', source: :account + + with_options class_name: 'Account', source: :account, through: :votes do + has_many :voters, -> { group('accounts.id') } + has_many :local_voters, -> { group('accounts.id').merge(Account.local) } + end has_many :notifications, as: :activity, dependent: :destroy diff --git a/app/models/privacy_policy.rb b/app/models/privacy_policy.rb index 36cbf18822..c0d6e1b76d 100644 --- a/app/models/privacy_policy.rb +++ b/app/models/privacy_policy.rb @@ -1,66 +1,7 @@ # frozen_string_literal: true class PrivacyPolicy < ActiveModelSerializers::Model - DEFAULT_PRIVACY_POLICY = <<~TXT - This privacy policy describes how %{domain} ("%{domain}", "we", "us") collects, protects and uses the personally identifiable information you may provide through the %{domain} website or its API. The policy also describes the choices available to you regarding our use of your personal information and how you can access and update this information. This policy does not apply to the practices of companies that %{domain} does not own or control, or to individuals that %{domain} does not employ or manage. - - # What information do we collect? - - - **Basic account information**: If you register on this server, you may be asked to enter a username, an e-mail address and a password. You may also enter additional profile information such as a display name and biography, and upload a profile picture and header image. The username, display name, biography, profile picture and header image are always listed publicly. - - **Posts, following and other public information**: The list of people you follow is listed publicly, the same is true for your followers. When you submit a message, the date and time is stored as well as the application you submitted the message from. Messages may contain media attachments, such as pictures and videos. Public and unlisted posts are available publicly. When you feature a post on your profile, that is also publicly available information. Your posts are delivered to your followers, in some cases it means they are delivered to different servers and copies are stored there. When you delete posts, this is likewise delivered to your followers. 
The action of reblogging or favouriting another post is always public. - - **Direct and followers-only posts**: All posts are stored and processed on the server. Followers-only posts are delivered to your followers and users who are mentioned in them, and direct posts are delivered only to users mentioned in them. In some cases it means they are delivered to different servers and copies are stored there. We make a good faith effort to limit the access to those posts only to authorized persons, but other servers may fail to do so. Therefore it's important to review servers your followers belong to. You may toggle an option to approve and reject new followers manually in the settings. **Please keep in mind that the operators of the server and any receiving server may view such messages**, and that recipients may screenshot, copy or otherwise re-share them. **Do not share any sensitive information over Mastodon.** - - **IPs and other metadata**: When you log in, we record the IP address you log in from, as well as the name of your browser application. All the logged in sessions are available for your review and revocation in the settings. The latest IP address used is stored for up to 12 months. We also may retain server logs which include the IP address of every request to our server. - - # What do we use your information for? - - Any of the information we collect from you may be used in the following ways: - - - To provide the core functionality of Mastodon. You can only interact with other people's content and post your own content when you are logged in. For example, you may follow other people to view their combined posts in your own personalized home timeline. - - To aid moderation of the community, for example comparing your IP address with other known ones to determine ban evasion or other violations. - - The email address you provide may be used to send you information, notifications about other people interacting with your content or sending you messages, and to respond to inquiries, and/or other requests or questions. - - # How do we protect your information? - - We implement a variety of security measures to maintain the safety of your personal information when you enter, submit, or access your personal information. Among other things, your browser session, as well as the traffic between your applications and the API, are secured with SSL, and your password is hashed using a strong one-way algorithm. You may enable two-factor authentication to further secure access to your account. - - # What is our data retention policy? - - We will make a good faith effort to: - - - Retain server logs containing the IP address of all requests to this server, in so far as such logs are kept, no more than 90 days. - - Retain the IP addresses associated with registered users no more than 12 months. - - You can request and download an archive of your content, including your posts, media attachments, profile picture, and header image. - - You may irreversibly delete your account at any time. - - # Do we use cookies? - - Yes. Cookies are small files that a site or its service provider transfers to your computer's hard drive through your Web browser (if you allow). These cookies enable the site to recognize your browser and, if you have a registered account, associate it with your registered account. - - We use cookies to understand and save your preferences for future visits. - - # Do we disclose any information to outside parties? 
- - We do not sell, trade, or otherwise transfer to outside parties your personally identifiable information. This does not include trusted third parties who assist us in operating our site, conducting our business, or servicing you, so long as those parties agree to keep this information confidential. We may also release your information when we believe release is appropriate to comply with the law, enforce our site policies, or protect ours or others rights, property, or safety. - - Your public content may be downloaded by other servers in the network. Your public and followers-only posts are delivered to the servers where your followers reside, and direct messages are delivered to the servers of the recipients, in so far as those followers or recipients reside on a different server than this. - - When you authorize an application to use your account, depending on the scope of permissions you approve, it may access your public profile information, your following list, your followers, your lists, all your posts, and your favourites. Applications can never access your e-mail address or password. - - # Site usage by children - - If this server is in the EU or the EEA: Our site, products and services are all directed to people who are at least 16 years old. If you are under the age of 16, per the requirements of the GDPR (General Data Protection Regulation) do not use this site. - - If this server is in the USA: Our site, products and services are all directed to people who are at least 13 years old. If you are under the age of 13, per the requirements of COPPA (Children's Online Privacy Protection Act) do not use this site. - - Law requirements can be different if this server is in another jurisdiction. - - ___ - - This document is CC-BY-SA. Originally adapted from the [Discourse privacy policy](https://github.com/discourse/discourse). 
- TXT - + DEFAULT_PRIVACY_POLICY = Rails.root.join('config', 'templates', 'privacy-policy.md').read DEFAULT_UPDATED_AT = DateTime.new(2022, 10, 7).freeze attributes :updated_at, :text diff --git a/app/models/report.rb b/app/models/report.rb index c565362cc6..126701b3d6 100644 --- a/app/models/report.rb +++ b/app/models/report.rb @@ -29,9 +29,12 @@ class Report < ApplicationRecord rate_limit by: :account, family: :reports belongs_to :account - belongs_to :target_account, class_name: 'Account' - belongs_to :action_taken_by_account, class_name: 'Account', optional: true - belongs_to :assigned_account, class_name: 'Account', optional: true + + with_options class_name: 'Account' do + belongs_to :target_account + belongs_to :action_taken_by_account, optional: true + belongs_to :assigned_account, optional: true + end has_many :notes, class_name: 'ReportNote', inverse_of: :report, dependent: :destroy has_many :notifications, as: :activity, dependent: :destroy diff --git a/app/models/status.rb b/app/models/status.rb index 3816a88314..1e3cf3c557 100644 --- a/app/models/status.rb +++ b/app/models/status.rb @@ -70,9 +70,11 @@ class Status < ApplicationRecord has_one :owned_conversation, class_name: 'Conversation', foreign_key: 'ancestor_status_id', dependent: :nullify, inverse_of: false belongs_to :preloadable_poll, class_name: 'Poll', foreign_key: 'poll_id', optional: true, inverse_of: false - belongs_to :thread, foreign_key: 'in_reply_to_id', class_name: 'Status', inverse_of: :replies, optional: true - belongs_to :reblog, foreign_key: 'reblog_of_id', class_name: 'Status', inverse_of: :reblogs, optional: true - belongs_to :quote, foreign_key: 'quote_of_id', class_name: 'Status', inverse_of: :quotes, optional: true + with_options class_name: 'Status', optional: true do + belongs_to :thread, foreign_key: 'in_reply_to_id', inverse_of: :replies + belongs_to :reblog, foreign_key: 'reblog_of_id', inverse_of: :reblogs + belongs_to :quote, foreign_key: 'quote_of_id', inverse_of: :quotes + end has_many :favourites, inverse_of: :status, dependent: :destroy has_many :emoji_reactions, inverse_of: :status, dependent: :destroy diff --git a/app/models/user.rb b/app/models/user.rb index f6a5fa2b81..f84654ef8d 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -436,7 +436,7 @@ class User < ApplicationRecord end def sign_up_from_ip_requires_approval? - !sign_up_ip.nil? && IpBlock.where(severity: :sign_up_requires_approval).where('ip >>= ?', sign_up_ip.to_s).exists? + sign_up_ip.present? && IpBlock.sign_up_requires_approval.exists?(['ip >>= ?', sign_up_ip.to_s]) end def sign_up_email_requires_approval? diff --git a/app/serializers/rest/tag_serializer.rb b/app/serializers/rest/tag_serializer.rb index 7801e77d1f..017b572718 100644 --- a/app/serializers/rest/tag_serializer.rb +++ b/app/serializers/rest/tag_serializer.rb @@ -19,7 +19,7 @@ class REST::TagSerializer < ActiveModel::Serializer if instance_options && instance_options[:relationships] instance_options[:relationships].following_map[object.id] || false else - TagFollow.where(tag_id: object.id, account_id: current_user.account_id).exists? 
+ TagFollow.exists?(tag_id: object.id, account_id: current_user.account_id) end end diff --git a/app/services/activitypub/fetch_featured_collection_service.rb b/app/services/activitypub/fetch_featured_collection_service.rb index d2bae08a0e..89c3a1b6c0 100644 --- a/app/services/activitypub/fetch_featured_collection_service.rb +++ b/app/services/activitypub/fetch_featured_collection_service.rb @@ -23,9 +23,9 @@ class ActivityPub::FetchFeaturedCollectionService < BaseService case collection['type'] when 'Collection', 'CollectionPage' - collection['items'] + as_array(collection['items']) when 'OrderedCollection', 'OrderedCollectionPage' - collection['orderedItems'] + as_array(collection['orderedItems']) end end diff --git a/app/services/activitypub/fetch_replies_service.rb b/app/services/activitypub/fetch_replies_service.rb index b5c7759ec5..e2ecdef165 100644 --- a/app/services/activitypub/fetch_replies_service.rb +++ b/app/services/activitypub/fetch_replies_service.rb @@ -26,9 +26,9 @@ class ActivityPub::FetchRepliesService < BaseService case collection['type'] when 'Collection', 'CollectionPage' - collection['items'] + as_array(collection['items']) when 'OrderedCollection', 'OrderedCollectionPage' - collection['orderedItems'] + as_array(collection['orderedItems']) end end @@ -37,7 +37,20 @@ class ActivityPub::FetchRepliesService < BaseService return unless @allow_synchronous_requests return if non_matching_uri_hosts?(@account.uri, collection_or_uri) - fetch_resource_without_id_validation(collection_or_uri, nil, true) + # NOTE: For backward compatibility reasons, Mastodon signs outgoing + # queries incorrectly by default. + # + # While this is relevant for all URLs with query strings, this is + # the only code path where this happens in practice. + # + # Therefore, retry with correct signatures if this fails. + begin + fetch_resource_without_id_validation(collection_or_uri, nil, true) + rescue Mastodon::UnexpectedResponseError => e + raise unless e.response && e.response.code == 401 && Addressable::URI.parse(collection_or_uri).query.present? + + fetch_resource_without_id_validation(collection_or_uri, nil, true, request_options: { with_query_string: true }) + end end def filtered_replies diff --git a/app/services/activitypub/synchronize_followers_service.rb b/app/services/activitypub/synchronize_followers_service.rb index 7ccc917309..f51d671a00 100644 --- a/app/services/activitypub/synchronize_followers_service.rb +++ b/app/services/activitypub/synchronize_followers_service.rb @@ -59,9 +59,9 @@ class ActivityPub::SynchronizeFollowersService < BaseService case collection['type'] when 'Collection', 'CollectionPage' - collection['items'] + as_array(collection['items']) when 'OrderedCollection', 'OrderedCollectionPage' - collection['orderedItems'] + as_array(collection['orderedItems']) end end diff --git a/app/services/keys/query_service.rb b/app/services/keys/query_service.rb index 14c9d9205b..33e13293f3 100644 --- a/app/services/keys/query_service.rb +++ b/app/services/keys/query_service.rb @@ -69,7 +69,7 @@ class Keys::QueryService < BaseService return if json['items'].blank? 
- @devices = json['items'].map do |device| + @devices = as_array(json['items']).map do |device| Device.new(device_id: device['id'], name: device['name'], identity_key: device.dig('identityKey', 'publicKeyBase64'), fingerprint_key: device.dig('fingerprintKey', 'publicKeyBase64'), claim_url: device['claim']) end rescue HTTP::Error, OpenSSL::SSL::SSLError, Mastodon::Error => e diff --git a/config/locales/devise.fi.yml b/config/locales/devise.fi.yml index bedf8a56f6..ac7a57c6f8 100644 --- a/config/locales/devise.fi.yml +++ b/config/locales/devise.fi.yml @@ -47,14 +47,19 @@ fi: subject: 'Mastodon: ohjeet salasanan vaihtoon' title: Salasanan vaihto two_factor_disabled: + explanation: Olet nyt mahdollistanut sisäänkirjautumisen pelkästään sähköpostiosoitteella ja salasanalla. subject: 'Mastodon: kaksivaiheinen todennus poistettu käytöstä' + subtitle: Kaksivaiheinen tunnistautuminen käyttäjätilillesi on poistettu käytöstä. title: 2-vaiheinen todennus pois käytöstä two_factor_enabled: + explanation: Sisäänkirjautuminen edellyttää liitetyn TOTP-sovelluksen luomaa aikarajattua kertatunnuslukua. subject: 'Mastodon: kaksivaiheinen todennus otettu käyttöön' + subtitle: Kaksivaiheinen kirjautuminen tilillesi on määritetty käyttöön. title: 2-vaiheinen todennus käytössä two_factor_recovery_codes_changed: explanation: Uudet palautuskoodit on nyt luotu ja vanhat on mitätöity. subject: 'Mastodon: kaksivaiheisen todennuksen palautuskoodit luotiin uudelleen' + subtitle: Aiemmat palautuskoodit on mitätöity, ja korvaavat uudet koodit on luotu. title: 2-vaiheisen todennuksen palautuskoodit vaihdettiin unlock_instructions: subject: 'Mastodon: lukituksen poistamisen ohjeet' @@ -68,9 +73,13 @@ fi: subject: 'Mastodon: suojausavain poistettu' title: Yksi suojausavaimistasi on poistettu webauthn_disabled: + explanation: Turva-avaimin kirjautuminen tilillesi on kytketty pois käytöstä. + extra: Olet nyt mahdollistanut sisäänkirjautumisen käyttäjätilillesi pelkästään palveluun liitetyn TOTP-sovelluksen luomalla aikarajoitteisella kertatunnusluvulla. subject: 'Mastodon: Todennus suojausavaimilla poistettu käytöstä' title: Suojausavaimet poistettu käytöstä webauthn_enabled: + explanation: Turva-avainkirjautuminen käyttäjätilillesi on otettu käyttöön. + extra: Voit nyt kirjautua sisään käyttäen turva-avaintasi. subject: 'Mastodon: Todennus suojausavaimella on otettu käyttöön' title: Suojausavaimet käytössä omniauth_callbacks: diff --git a/config/locales/devise.hu.yml b/config/locales/devise.hu.yml index 522ac66ad3..fea56ab24a 100644 --- a/config/locales/devise.hu.yml +++ b/config/locales/devise.hu.yml @@ -47,14 +47,19 @@ hu: subject: 'Mastodon: Jelszóvisszaállítási utasítások' title: Jelszó visszaállítása two_factor_disabled: + explanation: A bejelentkezés most már csupán email címmel és jelszóval lehetséges. subject: Kétlépcsős azonosítás kikapcsolva + subtitle: A kétlépcsős hitelesítés a fiókodhoz ki lett kapcsolva. title: Kétlépcsős hitelesítés kikapcsolva two_factor_enabled: + explanation: Egy párosított TOTP appal generált tokenre lesz szükség a bejelentkezéshez. subject: 'Mastodon: Kétlépcsős azonosítás engedélyezve' + subtitle: A kétlépcsős hitelesítés a fiókodhoz aktiválva lett. title: Kétlépcsős hitelesítés engedélyezve two_factor_recovery_codes_changed: explanation: A korábbi helyreállítási kódok letiltásra és újragenerálásra kerültek. subject: 'Mastodon: Kétlépcsős helyreállítási kódok újból előállítva' + subtitle: A korábbi helyreállítási kódokat letiltottuk, és újakat generáltunk. 
title: A kétlépcsős kódok megváltoztak unlock_instructions: subject: 'Mastodon: Feloldási utasítások' @@ -68,9 +73,13 @@ hu: subject: 'Mastodon: A biztonsági kulcs törlésre került' title: Az egyik biztonsági kulcsodat törölték webauthn_disabled: + explanation: A biztonsági kulcsokkal történő hitelesítés a fiókodhoz ki lett kapcsolva. + extra: A bejelentkezés most már csak TOTP app által generált tokennel lehetséges. subject: 'Mastodon: A biztonsági kulccsal történő hitelesítés letiltásra került' title: A biztonsági kulcsok letiltásra kerültek webauthn_enabled: + explanation: A biztonsági kulcsokkal történő hitelesítés a fiókodhoz aktiválva lett. + extra: A biztonsági kulcsodat mostantól lehet bejelentkezésre használni. subject: 'Mastodon: A biztonsági kulcsos hitelesítés engedélyezésre került' title: A biztonsági kulcsok engedélyezésre kerültek omniauth_callbacks: diff --git a/config/locales/devise.ko.yml b/config/locales/devise.ko.yml index 88865aec58..0c848e4bac 100644 --- a/config/locales/devise.ko.yml +++ b/config/locales/devise.ko.yml @@ -47,14 +47,19 @@ ko: subject: 'Mastodon: 암호 재설정 설명' title: 암호 재설정 two_factor_disabled: + explanation: 이제 이메일과 암호만 이용해서 로그인이 가능합니다. subject: '마스토돈: 이중 인증 비활성화' + subtitle: 계정에 대한 2단계 인증이 비활성화되었습니다. title: 2FA 비활성화 됨 two_factor_enabled: + explanation: 로그인 하기 위해서는 짝이 되는 TOTP 앱에서 생성한 토큰이 필요합니다. subject: '마스토돈: 이중 인증 활성화' + subtitle: 계정에 대한 2단계 인증이 활성화되었습니다. title: 2FA 활성화 됨 two_factor_recovery_codes_changed: explanation: 이전 복구 코드가 무효화되고 새 코드가 생성되었습니다 subject: '마스토돈: 이중 인증 복구 코드 재생성 됨' + subtitle: 이전 복구 코드가 무효화되고 새 코드가 생성되었습니다. title: 2FA 복구 코드 변경됨 unlock_instructions: subject: '마스토돈: 잠금 해제 방법' @@ -68,9 +73,13 @@ ko: subject: '마스토돈: 보안 키 삭제' title: 보안 키가 삭제되었습니다 webauthn_disabled: + explanation: 계정의 보안 키 인증이 비활성화되었습니다 + extra: 이제 TOTP 앱에서 생성한 토큰을 통해서만 로그인 가능합니다. subject: '마스토돈: 보안 키를 이용한 인증이 비활성화 됨' title: 보안 키 비활성화 됨 webauthn_enabled: + explanation: 계정에 대한 보안키 인증이 활성화되었습니다. + extra: 로그인시 보안키가 사용됩니다. subject: '마스토돈: 보안 키 인증 활성화 됨' title: 보안 키 활성화 됨 omniauth_callbacks: diff --git a/config/locales/en.yml b/config/locales/en.yml index 6c4f4c0d66..48f361d70b 100644 --- a/config/locales/en.yml +++ b/config/locales/en.yml @@ -2081,6 +2081,7 @@ en: go_to_sso_account_settings: Go to your identity provider's account settings invalid_otp_token: Invalid two-factor code otp_lost_help_html: If you lost access to both, you may get in touch with %{email} + rate_limited: Too many authentication attempts, try again later. seamless_external_login: You are logged in via an external service, so password and e-mail settings are not available. signed_in_as: 'Signed in as:' verification: diff --git a/config/locales/fi.yml b/config/locales/fi.yml index a719f3496f..26fe6b7f02 100644 --- a/config/locales/fi.yml +++ b/config/locales/fi.yml @@ -1608,6 +1608,7 @@ fi: unknown_browser: Tuntematon selain weibo: Weibo current_session: Nykyinen istunto + date: Päiväys description: "%{browser} alustalla %{platform}" explanation: Nämä verkkoselaimet ovat tällä hetkellä kirjautuneena Mastodon-tilillesi. ip: IP-osoite @@ -1774,14 +1775,19 @@ fi: webauthn: Suojausavaimet user_mailer: appeal_approved: + action: Tilin asetukset explanation: Valitus tiliäsi koskevasta varoituksesta %{strike_date} jonka lähetit %{appeal_date} on hyväksytty. Tilisi on jälleen hyvässä kunnossa. subject: Valituksesi %{date} on hyväksytty + subtitle: Tilisi on jälleen normaalissa tilassa. 
title: Valitus hyväksytty appeal_rejected: explanation: Valitus tiliäsi koskevasta varoituksesta %{strike_date} jonka lähetit %{appeal_date} on hylätty. subject: Valituksesi %{date} on hylätty + subtitle: Vetoomuksesi on hylätty. title: Valitus hylätty backup_ready: + explanation: Olet pyytänyt täysvarmuuskopion Mastodon-tilistäsi. + extra: Se on nyt valmis ladattavaksi! subject: Arkisto on valmiina ladattavaksi title: Arkiston tallennus suspicious_sign_in: diff --git a/config/locales/hu.yml b/config/locales/hu.yml index 536af8b6b5..7cfd7d80e2 100644 --- a/config/locales/hu.yml +++ b/config/locales/hu.yml @@ -1608,6 +1608,7 @@ hu: unknown_browser: Ismeretlen böngésző weibo: Weibo current_session: Jelenlegi munkamenet + date: Dátum description: "%{browser} az alábbi platformon: %{platform}" explanation: Jelenleg az alábbi böngészőkkel vagy bejelentkezve a fiókodba. ip: IP @@ -1774,14 +1775,19 @@ hu: webauthn: Biztonsági kulcsok user_mailer: appeal_approved: + action: Fiók Beállításai explanation: A fiókod %{appeal_date}-i fellebbezése, mely a %{strike_date}-i vétségeddel kapcsolatos, jóváhagyásra került. A fiókod megint makulátlan. subject: A %{date}-i fellebbezésedet jóváhagyták + subtitle: A fiókod ismét használható állapotban van. title: Fellebbezés jóváhagyva appeal_rejected: explanation: A %{appeal_date}-i fellebbezésed, amely a fiókod %{strike_date}-i vétségével kapcsolatos, elutasításra került. subject: A %{date}-i fellebbezésedet visszautasították + subtitle: A fellebbezésedet visszautasították. title: Fellebbezés visszautasítva backup_ready: + explanation: A Mastodon fiókod teljes biztonsági mentését kérted. + extra: Már letöltésre kész! subject: Az adataidról készült archív letöltésre kész title: Archiválás suspicious_sign_in: diff --git a/config/locales/ko.yml b/config/locales/ko.yml index b0eadc0504..b85b9b5861 100644 --- a/config/locales/ko.yml +++ b/config/locales/ko.yml @@ -1584,6 +1584,7 @@ ko: unknown_browser: 알 수 없는 브라우저 weibo: 웨이보 current_session: 현재 세션 + date: 날짜 description: "%{platform}의 %{browser}" explanation: 내 마스토돈 계정에 로그인되어 있는 웹 브라우저 목록입니다. ip: IP @@ -1744,14 +1745,19 @@ ko: webauthn: 보안 키 user_mailer: appeal_approved: + action: 계정 설정 explanation: "%{strike_date}에 일어난 중재결정에 대한 소명을 %{appeal_date}에 작성했으며 승낙되었습니다. 당신의 계정은 정상적인 상태로 돌아왔습니다." subject: 귀하가 %{date}에 작성한 소명이 승낙되었습니다 + subtitle: 계정이 다시 정상적인 상태입니다. title: 소명이 받아들여짐 appeal_rejected: explanation: "%{strike_date}에 일어난 중재결정에 대한 소명을 %{appeal_date}에 작성했지만 반려되었습니다." subject: "%{date}에 작성한 소명이 반려되었습니다." + subtitle: 소명이 기각되었습니다. title: 이의 제기가 거절되었습니다 backup_ready: + explanation: 마스토돈 계정에 대한 전체 백업을 요청했습니다 + extra: 다운로드 할 준비가 되었습니다! 
subject: 아카이브를 다운로드할 수 있습니다 title: 아카이브 테이크아웃 suspicious_sign_in: diff --git a/config/locales/sk.yml b/config/locales/sk.yml index fdd64b5bb7..89f456a205 100644 --- a/config/locales/sk.yml +++ b/config/locales/sk.yml @@ -633,6 +633,7 @@ sk: documentation_link: Zisti viac release_notes: Poznámky k vydaniu title: Dostupné aktualizácie + type: Druh types: major: Hlavné vydanie patch: Opravné vydanie - opravy a jednoducho uplatniteľné zmeny @@ -641,6 +642,7 @@ sk: account: Autor application: Aplikácia back_to_account: Späť na účet + back_to_report: Späť na stránku hlásenia batch: remove_from_report: Vymaž z hlásenia report: Hlásenie diff --git a/config/templates/privacy-policy.md b/config/templates/privacy-policy.md new file mode 100644 index 0000000000..9e042af80a --- /dev/null +++ b/config/templates/privacy-policy.md @@ -0,0 +1,128 @@ +This privacy policy describes how %{domain} ("%{domain}", "we", "us") +collects, protects and uses the personally identifiable information you may +provide through the %{domain} website or its API. The policy also +describes the choices available to you regarding our use of your personal +information and how you can access and update this information. This policy +does not apply to the practices of companies that %{domain} does not own +or control, or to individuals that %{domain} does not employ or manage. + +# What information do we collect? + +- **Basic account information**: If you register on this server, you may be + asked to enter a username, an e-mail address and a password. You may also + enter additional profile information such as a display name and biography, and + upload a profile picture and header image. The username, display name, + biography, profile picture and header image are always listed publicly. +- **Posts, following and other public information**: The list of people you + follow is listed publicly, the same is true for your followers. When you + submit a message, the date and time is stored as well as the application you + submitted the message from. Messages may contain media attachments, such as + pictures and videos. Public and unlisted posts are available publicly. When + you feature a post on your profile, that is also publicly available + information. Your posts are delivered to your followers, in some cases it + means they are delivered to different servers and copies are stored there. + When you delete posts, this is likewise delivered to your followers. The + action of reblogging or favouriting another post is always public. +- **Direct and followers-only posts**: All posts are stored and processed on the + server. Followers-only posts are delivered to your followers and users who are + mentioned in them, and direct posts are delivered only to users mentioned in + them. In some cases it means they are delivered to different servers and + copies are stored there. We make a good faith effort to limit the access to + those posts only to authorized persons, but other servers may fail to do so. + Therefore it's important to review servers your followers belong to. You may + toggle an option to approve and reject new followers manually in the settings. + **Please keep in mind that the operators of the server and any receiving + server may view such messages**, and that recipients may screenshot, copy or + otherwise re-share them. **Do not share any sensitive information over + Mastodon.** +- **IPs and other metadata**: When you log in, we record the IP address you log + in from, as well as the name of your browser application.
All the logged in + sessions are available for your review and revocation in the settings. The + latest IP address used is stored for up to 12 months. We also may retain + server logs which include the IP address of every request to our server. + +# What do we use your information for? + +Any of the information we collect from you may be used in the following ways: + +- To provide the core functionality of Mastodon. You can only interact with + other people's content and post your own content when you are logged in. For + example, you may follow other people to view their combined posts in your own + personalized home timeline. +- To aid moderation of the community, for example comparing your IP address with + other known ones to determine ban evasion or other violations. +- The email address you provide may be used to send you information, + notifications about other people interacting with your content or sending you + messages, and to respond to inquiries, and/or other requests or questions. + +# How do we protect your information? + +We implement a variety of security measures to maintain the safety of your +personal information when you enter, submit, or access your personal +information. Among other things, your browser session, as well as the traffic +between your applications and the API, are secured with SSL, and your password +is hashed using a strong one-way algorithm. You may enable two-factor +authentication to further secure access to your account. + +# What is our data retention policy? + +We will make a good faith effort to: + +- Retain server logs containing the IP address of all requests to this server, + in so far as such logs are kept, no more than 90 days. +- Retain the IP addresses associated with registered users no more than 12 + months. + +You can request and download an archive of your content, including your posts, +media attachments, profile picture, and header image. + +You may irreversibly delete your account at any time. + +# Do we use cookies? + +Yes. Cookies are small files that a site or its service provider transfers to +your computer's hard drive through your Web browser (if you allow). These +cookies enable the site to recognize your browser and, if you have a registered +account, associate it with your registered account. + +We use cookies to understand and save your preferences for future visits. + +# Do we disclose any information to outside parties? + +We do not sell, trade, or otherwise transfer to outside parties your personally +identifiable information. This does not include trusted third parties who assist +us in operating our site, conducting our business, or servicing you, so long as +those parties agree to keep this information confidential. We may also release +your information when we believe release is appropriate to comply with the law, +enforce our site policies, or protect ours or others rights, property, or +safety. + +Your public content may be downloaded by other servers in the network. Your +public and followers-only posts are delivered to the servers where your +followers reside, and direct messages are delivered to the servers of the +recipients, in so far as those followers or recipients reside on a different +server than this. + +When you authorize an application to use your account, depending on the scope of +permissions you approve, it may access your public profile information, your +following list, your followers, your lists, all your posts, and your favourites. 
+Applications can never access your e-mail address or password. + +# Site usage by children + +If this server is in the EU or the EEA: Our site, products and services are all +directed to people who are at least 16 years old. If you are under the age of +16, per the requirements of the GDPR (General Data Protection Regulation) do not +use this site. + +If this server is in the USA: Our site, products and services are all directed +to people who are at least 13 years old. If you are under the age of 13, per the +requirements of COPPA (Children's Online Privacy Protection Act) do not use this +site. + +Law requirements can be different if this server is in another jurisdiction. + +--- + +This document is CC-BY-SA. Originally adapted from the [Discourse privacy +policy](https://github.com/discourse/discourse). diff --git a/db/migrate/20180812173710_copy_status_stats.rb b/db/migrate/20180812173710_copy_status_stats.rb index ca2892cf62..5933fb36d7 100644 --- a/db/migrate/20180812173710_copy_status_stats.rb +++ b/db/migrate/20180812173710_copy_status_stats.rb @@ -24,8 +24,7 @@ class CopyStatusStats < ActiveRecord::Migration[5.2] private def supports_upsert? - version = select_one("SELECT current_setting('server_version_num') AS v")['v'].to_i - version >= 90_500 + ActiveRecord::Base.connection.database_version >= 90_500 end def up_fast diff --git a/db/migrate/20181116173541_copy_account_stats.rb b/db/migrate/20181116173541_copy_account_stats.rb index 9070200fee..e5faee0cb5 100644 --- a/db/migrate/20181116173541_copy_account_stats.rb +++ b/db/migrate/20181116173541_copy_account_stats.rb @@ -24,8 +24,7 @@ class CopyAccountStats < ActiveRecord::Migration[5.2] private def supports_upsert? - version = select_one("SELECT current_setting('server_version_num') AS v")['v'].to_i - version >= 90_500 + ActiveRecord::Base.connection.database_version >= 90_500 end def up_fast diff --git a/db/post_migrate/20230803082451_add_unique_index_on_preview_cards_statuses.rb b/db/post_migrate/20230803082451_add_unique_index_on_preview_cards_statuses.rb index d29d7847c5..4271f8c08a 100644 --- a/db/post_migrate/20230803082451_add_unique_index_on_preview_cards_statuses.rb +++ b/db/post_migrate/20230803082451_add_unique_index_on_preview_cards_statuses.rb @@ -17,8 +17,7 @@ class AddUniqueIndexOnPreviewCardsStatuses < ActiveRecord::Migration[6.1] def supports_concurrent_reindex? @supports_concurrent_reindex ||= begin - version = select_one("SELECT current_setting('server_version_num') AS v")['v'].to_i - version >= 120_000 + ActiveRecord::Base.connection.database_version >= 120_000 end end diff --git a/lib/mastodon/cli/maintenance.rb b/lib/mastodon/cli/maintenance.rb index f37662aa06..e2ea866152 100644 --- a/lib/mastodon/cli/maintenance.rb +++ b/lib/mastodon/cli/maintenance.rb @@ -223,7 +223,7 @@ module Mastodon::CLI say 'Deduplicating accounts… for local accounts, you will be asked to chose which account to keep unchanged.' find_duplicate_accounts.each do |row| - accounts = Account.where(id: row['ids'].split(',')).to_a + accounts = Account.where(id: row['ids'].split(',')) if accounts.first.local? 
deduplicate_local_accounts!(accounts) @@ -275,7 +275,7 @@ module Mastodon::CLI def deduplicate_users_process_email ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users GROUP BY email HAVING count(*) > 1").each do |row| - users = User.where(id: row['ids'].split(',')).sort_by(&:updated_at).reverse + users = User.where(id: row['ids'].split(',')).order(updated_at: :desc).to_a ref_user = users.shift say "Multiple users registered with e-mail address #{ref_user.email}.", :yellow say "e-mail will be disabled for the following accounts: #{users.map { |user| user.account.acct }.join(', ')}", :yellow @@ -289,7 +289,7 @@ module Mastodon::CLI def deduplicate_users_process_confirmation_token ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users WHERE confirmation_token IS NOT NULL GROUP BY confirmation_token HAVING count(*) > 1").each do |row| - users = User.where(id: row['ids'].split(',')).sort_by(&:created_at).reverse.drop(1) + users = User.where(id: row['ids'].split(',')).order(created_at: :desc).to_a.drop(1) say "Unsetting confirmation token for those accounts: #{users.map { |user| user.account.acct }.join(', ')}", :yellow users.each do |user| @@ -301,7 +301,7 @@ module Mastodon::CLI def deduplicate_users_process_remember_token if migrator_version < 2022_01_18_183010 ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users WHERE remember_token IS NOT NULL GROUP BY remember_token HAVING count(*) > 1").each do |row| - users = User.where(id: row['ids'].split(',')).sort_by(&:updated_at).reverse.drop(1) + users = User.where(id: row['ids'].split(',')).order(updated_at: :desc).to_a.drop(1) say "Unsetting remember token for those accounts: #{users.map { |user| user.account.acct }.join(', ')}", :yellow users.each do |user| @@ -313,7 +313,7 @@ module Mastodon::CLI def deduplicate_users_process_password_token ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM users WHERE reset_password_token IS NOT NULL GROUP BY reset_password_token HAVING count(*) > 1").each do |row| - users = User.where(id: row['ids'].split(',')).sort_by(&:updated_at).reverse.drop(1) + users = User.where(id: row['ids'].split(',')).order(updated_at: :desc).to_a.drop(1) say "Unsetting password reset token for those accounts: #{users.map { |user| user.account.acct }.join(', ')}", :yellow users.each do |user| @@ -341,7 +341,7 @@ module Mastodon::CLI say 'Removing duplicate account identity proofs…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM account_identity_proofs GROUP BY account_id, provider, provider_username HAVING count(*) > 1").each do |row| - AccountIdentityProof.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy) + AccountIdentityProof.where(id: row['ids'].split(',')).order(id: :desc).to_a.drop(1).each(&:destroy) end say 'Restoring account identity proofs indexes…' @@ -355,7 +355,7 @@ module Mastodon::CLI say 'Removing duplicate announcement reactions…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM announcement_reactions GROUP BY account_id, announcement_id, name HAVING count(*) > 1").each do |row| - AnnouncementReaction.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy) + AnnouncementReaction.where(id: row['ids'].split(',')).order(id: :desc).to_a.drop(1).each(&:destroy) end say 'Restoring announcement_reactions indexes…' @@ -367,7 +367,7 @@ module 
Mastodon::CLI say 'Deduplicating conversations…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM conversations WHERE uri IS NOT NULL GROUP BY uri HAVING count(*) > 1").each do |row| - conversations = Conversation.where(id: row['ids'].split(',')).sort_by(&:id).reverse + conversations = Conversation.where(id: row['ids'].split(',')).order(id: :desc).to_a ref_conversation = conversations.shift @@ -390,7 +390,7 @@ module Mastodon::CLI say 'Deduplicating custom_emojis…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM custom_emojis GROUP BY shortcode, domain HAVING count(*) > 1").each do |row| - emojis = CustomEmoji.where(id: row['ids'].split(',')).sort_by(&:id).reverse + emojis = CustomEmoji.where(id: row['ids'].split(',')).order(id: :desc).to_a ref_emoji = emojis.shift @@ -409,7 +409,7 @@ module Mastodon::CLI say 'Deduplicating custom_emoji_categories…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM custom_emoji_categories GROUP BY name HAVING count(*) > 1").each do |row| - categories = CustomEmojiCategory.where(id: row['ids'].split(',')).sort_by(&:id).reverse + categories = CustomEmojiCategory.where(id: row['ids'].split(',')).order(id: :desc).to_a ref_category = categories.shift @@ -428,7 +428,7 @@ module Mastodon::CLI say 'Deduplicating domain_allows…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM domain_allows GROUP BY domain HAVING count(*) > 1").each do |row| - DomainAllow.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy) + DomainAllow.where(id: row['ids'].split(',')).order(id: :desc).to_a.drop(1).each(&:destroy) end say 'Restoring domain_allows indexes…' @@ -466,7 +466,7 @@ module Mastodon::CLI say 'Deduplicating unavailable_domains…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM unavailable_domains GROUP BY domain HAVING count(*) > 1").each do |row| - UnavailableDomain.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy) + UnavailableDomain.where(id: row['ids'].split(',')).order(id: :desc).to_a.drop(1).each(&:destroy) end say 'Restoring unavailable_domains indexes…' @@ -478,7 +478,7 @@ module Mastodon::CLI say 'Deduplicating email_domain_blocks…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM email_domain_blocks GROUP BY domain HAVING count(*) > 1").each do |row| - domain_blocks = EmailDomainBlock.where(id: row['ids'].split(',')).sort_by { |b| b.parent.nil? ? 
1 : 0 }.to_a + domain_blocks = EmailDomainBlock.where(id: row['ids'].split(',')).order(EmailDomainBlock.arel_table[:parent_id].asc.nulls_first).to_a domain_blocks.drop(1).each(&:destroy) end @@ -507,7 +507,7 @@ module Mastodon::CLI say 'Deduplicating preview_cards…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM preview_cards GROUP BY url HAVING count(*) > 1").each do |row| - PreviewCard.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy) + PreviewCard.where(id: row['ids'].split(',')).order(id: :desc).to_a.drop(1).each(&:destroy) end say 'Restoring preview_cards indexes…' @@ -519,7 +519,7 @@ module Mastodon::CLI say 'Deduplicating statuses…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM statuses WHERE uri IS NOT NULL GROUP BY uri HAVING count(*) > 1").each do |row| - statuses = Status.where(id: row['ids'].split(',')).sort_by(&:id) + statuses = Status.where(id: row['ids'].split(',')).order(id: :asc).to_a ref_status = statuses.shift statuses.each do |status| merge_statuses!(ref_status, status) if status.account_id == ref_status.account_id @@ -541,7 +541,7 @@ module Mastodon::CLI say 'Deduplicating tags…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM tags GROUP BY lower((name)::text) HAVING count(*) > 1").each do |row| - tags = Tag.where(id: row['ids'].split(',')).sort_by { |t| [t.usable?, t.trendable?, t.listable?].count(false) } + tags = Tag.where(id: row['ids'].split(',')).order(Arel.sql('(usable::int + trendable::int + listable::int) desc')).to_a ref_tag = tags.shift tags.each do |tag| merge_tags!(ref_tag, tag) @@ -564,7 +564,7 @@ module Mastodon::CLI say 'Deduplicating webauthn_credentials…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM webauthn_credentials GROUP BY external_id HAVING count(*) > 1").each do |row| - WebauthnCredential.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy) + WebauthnCredential.where(id: row['ids'].split(',')).order(id: :desc).to_a.drop(1).each(&:destroy) end say 'Restoring webauthn_credentials indexes…' @@ -578,7 +578,7 @@ module Mastodon::CLI say 'Deduplicating webhooks…' ActiveRecord::Base.connection.select_all("SELECT string_agg(id::text, ',') AS ids FROM webhooks GROUP BY url HAVING count(*) > 1").each do |row| - Webhook.where(id: row['ids'].split(',')).sort_by(&:id).reverse.drop(1).each(&:destroy) + Webhook.where(id: row['ids'].split(',')).order(id: :desc).drop(1).each(&:destroy) end say 'Restoring webhooks indexes…' @@ -590,8 +590,8 @@ module Mastodon::CLI SoftwareUpdate.delete_all end - def deduplicate_local_accounts!(accounts) - accounts = accounts.sort_by(&:id).reverse + def deduplicate_local_accounts!(scope) + accounts = scope.order(id: :desc).to_a say "Multiple local accounts were found for username '#{accounts.first.username}'.", :yellow say 'All those accounts are distinct accounts but only the most recently-created one is fully-functional.', :yellow @@ -629,8 +629,8 @@ module Mastodon::CLI end end - def deduplicate_remote_accounts!(accounts) - accounts = accounts.sort_by(&:updated_at).reverse + def deduplicate_remote_accounts!(scope) + accounts = scope.order(updated_at: :desc).to_a reference_account = accounts.shift diff --git a/lib/mastodon/migration_helpers.rb b/lib/mastodon/migration_helpers.rb index 1a2ce6420f..a713f42d41 100644 --- a/lib/mastodon/migration_helpers.rb +++ b/lib/mastodon/migration_helpers.rb @@ -8,15 +8,15 @@ # 
shorten temporary column names. # Documentation on using these functions (and why one might do so): -# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/development/what_requires_downtime.md +# https://gitlab.com/gitlab-org/gitlab-foss/-/blob/master/doc/development/database/avoiding_downtime_in_migrations.md -# The file itself: -# https://gitlab.com/gitlab-org/gitlab-ce/blob/master/lib/gitlab/database/migration_helpers.rb +# The original file (since updated): +# https://gitlab.com/gitlab-org/gitlab-foss/-/blob/master/lib/gitlab/database/migration_helpers.rb # It is licensed as follows: -# Copyright (c) 2011-2017 GitLab B.V. - +# Copyright (c) 2011-present GitLab B.V. +# # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights @@ -24,16 +24,16 @@ # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. # This is bad form, but there are enough differences that it's impractical to do # otherwise: @@ -77,37 +77,12 @@ module Mastodon end end - BACKGROUND_MIGRATION_BATCH_SIZE = 1000 # Number of rows to process per job - BACKGROUND_MIGRATION_JOB_BUFFER_SIZE = 1000 # Number of jobs to bulk queue at a time - # Gets an estimated number of rows for a table def estimate_rows_in_table(table_name) exec_query('SELECT reltuples FROM pg_class WHERE relname = ' + "'#{table_name}'").to_a.first['reltuples'] end - # Adds `created_at` and `updated_at` columns with timezone information. - # - # This method is an improved version of Rails' built-in method `add_timestamps`. - # - # Available options are: - # default - The default value for the column. - # null - When set to `true` the column will allow NULL values. - # The default is to not allow NULL values. - def add_timestamps_with_timezone(table_name, **options) - options[:null] = false if options[:null].nil? - - [:created_at, :updated_at].each do |column_name| - if options[:default] && transaction_open? - raise '`add_timestamps_with_timezone` with default value cannot be run inside a transaction. 
' \ - 'You can disable transactions by calling `disable_ddl_transaction!` ' \ - 'in the body of your migration class' - end - - add_column(table_name, column_name, :datetime_with_timezone, **options) - end - end - # Creates a new index, concurrently when supported # # On PostgreSQL this method creates an index concurrently, on MySQL this @@ -746,39 +721,6 @@ module Mastodon rename_index table_name, "#{index_name}_new", index_name end - # This will replace the first occurrence of a string in a column with - # the replacement - # On postgresql we can use `regexp_replace` for that. - # On mysql we find the location of the pattern, and overwrite it - # with the replacement - def replace_sql(column, pattern, replacement) - quoted_pattern = Arel::Nodes::Quoted.new(pattern.to_s) - quoted_replacement = Arel::Nodes::Quoted.new(replacement.to_s) - - replace = Arel::Nodes::NamedFunction - .new("regexp_replace", [column, quoted_pattern, quoted_replacement]) - Arel::Nodes::SqlLiteral.new(replace.to_sql) - end - - def remove_foreign_key_without_error(*args) - remove_foreign_key(*args) - rescue ArgumentError - end - - def sidekiq_queue_migrate(queue_from, to:) - while sidekiq_queue_length(queue_from) > 0 - Sidekiq.redis do |conn| - conn.rpoplpush "queue:#{queue_from}", "queue:#{to}" - end - end - end - - def sidekiq_queue_length(queue_name) - Sidekiq.redis do |conn| - conn.llen("queue:#{queue_name}") - end - end - def check_trigger_permissions!(table) unless Grant.create_and_execute_trigger?(table) dbname = ActiveRecord::Base.configurations[Rails.env]['database'] @@ -799,91 +741,6 @@ into similar problems in the future (e.g. when new tables are created). end end - # Bulk queues background migration jobs for an entire table, batched by ID range. - # "Bulk" meaning many jobs will be pushed at a time for efficiency. - # If you need a delay interval per job, then use `queue_background_migration_jobs_by_range_at_intervals`. - # - # model_class - The table being iterated over - # job_class_name - The background migration job class as a string - # batch_size - The maximum number of rows per job - # - # Example: - # - # class Route < ActiveRecord::Base - # include EachBatch - # self.table_name = 'routes' - # end - # - # bulk_queue_background_migration_jobs_by_range(Route, 'ProcessRoutes') - # - # Where the model_class includes EachBatch, and the background migration exists: - # - # class Gitlab::BackgroundMigration::ProcessRoutes - # def perform(start_id, end_id) - # # do something - # end - # end - def bulk_queue_background_migration_jobs_by_range(model_class, job_class_name, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE) - raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id') - - jobs = [] - - model_class.each_batch(of: batch_size) do |relation| - start_id, end_id = relation.pluck('MIN(id), MAX(id)').first - - if jobs.length >= BACKGROUND_MIGRATION_JOB_BUFFER_SIZE - # Note: This code path generally only helps with many millions of rows - # We push multiple jobs at a time to reduce the time spent in - # Sidekiq/Redis operations. We're using this buffer based approach so we - # don't need to run additional queries for every range. - BackgroundMigrationWorker.perform_bulk(jobs) - jobs.clear - end - - jobs << [job_class_name, [start_id, end_id]] - end - - BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty? - end - - # Queues background migration jobs for an entire table, batched by ID range. 
- # Each job is scheduled with a `delay_interval` in between. - # If you use a small interval, then some jobs may run at the same time. - # - # model_class - The table being iterated over - # job_class_name - The background migration job class as a string - # delay_interval - The duration between each job's scheduled time (must respond to `to_f`) - # batch_size - The maximum number of rows per job - # - # Example: - # - # class Route < ActiveRecord::Base - # include EachBatch - # self.table_name = 'routes' - # end - # - # queue_background_migration_jobs_by_range_at_intervals(Route, 'ProcessRoutes', 1.minute) - # - # Where the model_class includes EachBatch, and the background migration exists: - # - # class Gitlab::BackgroundMigration::ProcessRoutes - # def perform(start_id, end_id) - # # do something - # end - # end - def queue_background_migration_jobs_by_range_at_intervals(model_class, job_class_name, delay_interval, batch_size: BACKGROUND_MIGRATION_BATCH_SIZE) - raise "#{model_class} does not have an ID to use for batch ranges" unless model_class.column_names.include?('id') - - model_class.each_batch(of: batch_size) do |relation, index| - start_id, end_id = relation.pluck('MIN(id), MAX(id)').first - - # `BackgroundMigrationWorker.bulk_perform_in` schedules all jobs for - # the same time, which is not helpful in most cases where we wish to - # spread the work over time. - BackgroundMigrationWorker.perform_in(delay_interval * index, job_class_name, [start_id, end_id]) - end - end - private # https://github.com/rails/rails/blob/v5.2.0/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb#L678-L684 diff --git a/lib/paperclip/response_with_limit_adapter.rb b/lib/paperclip/response_with_limit_adapter.rb index deb89717a4..ff7a938abb 100644 --- a/lib/paperclip/response_with_limit_adapter.rb +++ b/lib/paperclip/response_with_limit_adapter.rb @@ -16,7 +16,7 @@ module Paperclip private def cache_current_values - @original_filename = filename_from_content_disposition.presence || filename_from_path.presence || 'data' + @original_filename = truncated_filename @tempfile = copy_to_tempfile(@target) @content_type = ContentTypeDetector.new(@tempfile.path).detect @size = File.size(@tempfile) @@ -43,6 +43,13 @@ module Paperclip source.response.connection.close end + def truncated_filename + filename = filename_from_content_disposition.presence || filename_from_path.presence || 'data' + extension = File.extname(filename) + basename = File.basename(filename, extension) + [basename[...20], extension[..4]].compact_blank.join + end + def filename_from_content_disposition disposition = @target.response.headers['content-disposition'] disposition&.match(/filename="([^"]*)"/)&.captures&.first diff --git a/lib/tasks/db.rake b/lib/tasks/db.rake index 3bc526bd21..4208c2ae4b 100644 --- a/lib/tasks/db.rake +++ b/lib/tasks/db.rake @@ -16,8 +16,8 @@ namespace :db do end task pre_migration_check: :environment do - version = ActiveRecord::Base.connection.select_one("SELECT current_setting('server_version_num') AS v")['v'].to_i - abort 'This version of Mastodon requires PostgreSQL 9.5 or newer. Please update PostgreSQL before updating Mastodon' if version < 90_500 + version = ActiveRecord::Base.connection.database_version + abort 'This version of Mastodon requires PostgreSQL 12.0 or newer. Please update PostgreSQL before updating Mastodon.' 
if version < 120_000 end Rake::Task['db:migrate'].enhance(['db:pre_migration_check']) diff --git a/spec/controllers/auth/sessions_controller_spec.rb b/spec/controllers/auth/sessions_controller_spec.rb index e3f2b278bd..d238626c9d 100644 --- a/spec/controllers/auth/sessions_controller_spec.rb +++ b/spec/controllers/auth/sessions_controller_spec.rb @@ -262,6 +262,26 @@ RSpec.describe Auth::SessionsController do end end + context 'when repeatedly using an invalid TOTP code before using a valid code' do + before do + stub_const('Auth::SessionsController::MAX_2FA_ATTEMPTS_PER_HOUR', 2) + end + + it 'does not log the user in' do + # Travel to the beginning of an hour to avoid crossing rate-limit buckets + travel_to '2023-12-20T10:00:00Z' + + Auth::SessionsController::MAX_2FA_ATTEMPTS_PER_HOUR.times do + post :create, params: { user: { otp_attempt: '1234' } }, session: { attempt_user_id: user.id, attempt_user_updated_at: user.updated_at.to_s } + expect(controller.current_user).to be_nil + end + + post :create, params: { user: { otp_attempt: user.current_otp } }, session: { attempt_user_id: user.id, attempt_user_updated_at: user.updated_at.to_s } + expect(controller.current_user).to be_nil + expect(flash[:alert]).to match I18n.t('users.rate_limited') + end + end + context 'when using a valid OTP' do before do post :create, params: { user: { otp_attempt: user.current_otp } }, session: { attempt_user_id: user.id, attempt_user_updated_at: user.updated_at.to_s } diff --git a/spec/models/account_spec.rb b/spec/models/account_spec.rb index b03fcd54c3..35f7190920 100644 --- a/spec/models/account_spec.rb +++ b/spec/models/account_spec.rb @@ -9,14 +9,10 @@ RSpec.describe Account do let(:bob) { Fabricate(:account, username: 'bob') } describe '#suspend!' do - it 'marks the account as suspended' do - subject.suspend! - expect(subject.suspended?).to be true - end - - it 'creates a deletion request' do - subject.suspend! - expect(AccountDeletionRequest.where(account: subject).exists?).to be true + it 'marks the account as suspended and creates a deletion request' do + expect { subject.suspend! } + .to change(subject, :suspended?).from(false).to(true) + .and(change { AccountDeletionRequest.exists?(account: subject) }.from(false).to(true)) end context 'when the account is of a local user' do @@ -1050,6 +1046,25 @@ RSpec.describe Account do end describe 'scopes' do + describe 'auditable' do + let!(:alice) { Fabricate :account } + let!(:bob) { Fabricate :account } + + before do + 2.times { Fabricate :action_log, account: alice } + end + + it 'returns distinct accounts with action log records' do + results = described_class.auditable + + expect(results.size) + .to eq(1) + expect(results) + .to include(alice) + .and not_include(bob) + end + end + describe 'alphabetic' do it 'sorts by alphabetic order of domain and username' do matches = [ diff --git a/spec/models/domain_allow_spec.rb b/spec/models/domain_allow_spec.rb index 49e16376ea..12504211a1 100644 --- a/spec/models/domain_allow_spec.rb +++ b/spec/models/domain_allow_spec.rb @@ -3,16 +3,18 @@ require 'rails_helper' describe DomainAllow do - describe 'scopes' do - describe 'matches_domain' do - let(:domain) { Fabricate(:domain_allow, domain: 'example.com') } - let(:other_domain) { Fabricate(:domain_allow, domain: 'example.biz') } + describe 'Validations' do + it 'is invalid without a domain' do + domain_allow = Fabricate.build(:domain_allow, domain: nil) + domain_allow.valid? 
+ expect(domain_allow).to model_have_error_on_field(:domain) + end - it 'returns the correct records' do - results = described_class.matches_domain('example.com') - - expect(results).to eq([domain]) - end + it 'is invalid if the same normalized domain already exists' do + _domain_allow = Fabricate(:domain_allow, domain: 'にゃん') + domain_allow_with_normalized_value = Fabricate.build(:domain_allow, domain: 'xn--r9j5b5b') + domain_allow_with_normalized_value.valid? + expect(domain_allow_with_normalized_value).to model_have_error_on_field(:domain) end end end diff --git a/spec/controllers/api/v1/accounts/follower_accounts_controller_spec.rb b/spec/requests/api/v1/accounts/follower_accounts_spec.rb similarity index 69% rename from spec/controllers/api/v1/accounts/follower_accounts_controller_spec.rb rename to spec/requests/api/v1/accounts/follower_accounts_spec.rb index 510a47566b..7ff92d6a48 100644 --- a/spec/controllers/api/v1/accounts/follower_accounts_controller_spec.rb +++ b/spec/requests/api/v1/accounts/follower_accounts_spec.rb @@ -2,11 +2,11 @@ require 'rails_helper' -describe Api::V1::Accounts::FollowerAccountsController do - render_views - +describe 'API V1 Accounts FollowerAccounts' do let(:user) { Fabricate(:user) } - let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read:accounts') } + let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: scopes) } + let(:scopes) { 'read:accounts' } + let(:headers) { { 'Authorization' => "Bearer #{token.token}" } } let(:account) { Fabricate(:account) } let(:alice) { Fabricate(:account) } let(:bob) { Fabricate(:account) } @@ -14,12 +14,11 @@ describe Api::V1::Accounts::FollowerAccountsController do before do alice.follow!(account) bob.follow!(account) - allow(controller).to receive(:doorkeeper_token) { token } end - describe 'GET #index' do + describe 'GET /api/v1/accounts/:account_id/followers' do it 'returns accounts following the given account', :aggregate_failures do - get :index, params: { account_id: account.id, limit: 2 } + get "/api/v1/accounts/#{account.id}/followers", params: { limit: 2 }, headers: headers expect(response).to have_http_status(200) expect(body_as_json.size).to eq 2 @@ -28,7 +27,7 @@ describe Api::V1::Accounts::FollowerAccountsController do it 'does not return blocked users', :aggregate_failures do user.account.block!(bob) - get :index, params: { account_id: account.id, limit: 2 } + get "/api/v1/accounts/#{account.id}/followers", params: { limit: 2 }, headers: headers expect(response).to have_http_status(200) expect(body_as_json.size).to eq 1 @@ -41,7 +40,7 @@ describe Api::V1::Accounts::FollowerAccountsController do end it 'hides results' do - get :index, params: { account_id: account.id, limit: 2 } + get "/api/v1/accounts/#{account.id}/followers", params: { limit: 2 }, headers: headers expect(body_as_json.size).to eq 0 end end @@ -51,7 +50,7 @@ describe Api::V1::Accounts::FollowerAccountsController do it 'returns all accounts, including muted accounts' do account.mute!(bob) - get :index, params: { account_id: account.id, limit: 2 } + get "/api/v1/accounts/#{account.id}/followers", params: { limit: 2 }, headers: headers expect(body_as_json.size).to eq 2 expect([body_as_json[0][:id], body_as_json[1][:id]]).to contain_exactly(alice.id.to_s, bob.id.to_s) diff --git a/spec/controllers/api/v1/accounts/following_accounts_controller_spec.rb b/spec/requests/api/v1/accounts/following_accounts_spec.rb similarity index 69% rename from
spec/controllers/api/v1/accounts/following_accounts_controller_spec.rb rename to spec/requests/api/v1/accounts/following_accounts_spec.rb index a7d07a6bec..b343a48654 100644 --- a/spec/controllers/api/v1/accounts/following_accounts_controller_spec.rb +++ b/spec/requests/api/v1/accounts/following_accounts_spec.rb @@ -2,11 +2,11 @@ require 'rails_helper' -describe Api::V1::Accounts::FollowingAccountsController do - render_views - +describe 'API V1 Accounts FollowingAccounts' do let(:user) { Fabricate(:user) } - let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read:accounts') } + let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: scopes) } + let(:scopes) { 'read:accounts' } + let(:headers) { { 'Authorization' => "Bearer #{token.token}" } } let(:account) { Fabricate(:account) } let(:alice) { Fabricate(:account) } let(:bob) { Fabricate(:account) } @@ -14,12 +14,11 @@ describe Api::V1::Accounts::FollowingAccountsController do before do account.follow!(alice) account.follow!(bob) - allow(controller).to receive(:doorkeeper_token) { token } end - describe 'GET #index' do + describe 'GET /api/v1/accounts/:account_id/following' do it 'returns accounts followed by the given account', :aggregate_failures do - get :index, params: { account_id: account.id, limit: 2 } + get "/api/v1/accounts/#{account.id}/following", params: { limit: 2 }, headers: headers expect(response).to have_http_status(200) expect(body_as_json.size).to eq 2 @@ -28,7 +27,7 @@ describe Api::V1::Accounts::FollowingAccountsController do it 'does not return blocked users', :aggregate_failures do user.account.block!(bob) - get :index, params: { account_id: account.id, limit: 2 } + get "/api/v1/accounts/#{account.id}/following", params: { limit: 2 }, headers: headers expect(response).to have_http_status(200) expect(body_as_json.size).to eq 1 @@ -41,7 +40,7 @@ describe Api::V1::Accounts::FollowingAccountsController do end it 'hides results' do - get :index, params: { account_id: account.id, limit: 2 } + get "/api/v1/accounts/#{account.id}/following", params: { limit: 2 }, headers: headers expect(body_as_json.size).to eq 0 end end @@ -51,7 +50,7 @@ describe Api::V1::Accounts::FollowingAccountsController do it 'returns all accounts, including muted accounts' do account.mute!(bob) - get :index, params: { account_id: account.id, limit: 2 } + get "/api/v1/accounts/#{account.id}/following", params: { limit: 2 }, headers: headers expect(body_as_json.size).to eq 2 expect([body_as_json[0][:id], body_as_json[1][:id]]).to contain_exactly(alice.id.to_s, bob.id.to_s) diff --git a/spec/requests/api/v1/peers/search_spec.rb b/spec/requests/api/v1/peers/search_spec.rb new file mode 100644 index 0000000000..dcdea387a5 --- /dev/null +++ b/spec/requests/api/v1/peers/search_spec.rb @@ -0,0 +1,59 @@ +# frozen_string_literal: true + +require 'rails_helper' + +describe 'API Peers Search' do + describe 'GET /api/v1/peers/search' do + context 'when peers api is disabled' do + before do + Setting.peers_api_enabled = false + end + + it 'returns http not found response' do + get '/api/v1/peers/search' + + expect(response) + .to have_http_status(404) + end + end + + context 'with no search param' do + it 'returns http success and empty response' do + get '/api/v1/peers/search' + + expect(response) + .to have_http_status(200) + expect(body_as_json) + .to be_blank + end + end + + context 'with invalid search param' do + it 'returns http success and empty response' do + get '/api/v1/peers/search', 
params: { q: 'ftp://Invalid-Host!!.valüe' } + + expect(response) + .to have_http_status(200) + expect(body_as_json) + .to be_blank + end + end + + context 'with search param' do + let!(:account) { Fabricate(:account, domain: 'host.example') } + + before { Instance.refresh } + + it 'returns http success and json with known domains' do + get '/api/v1/peers/search', params: { q: 'host.example' } + + expect(response) + .to have_http_status(200) + expect(body_as_json.size) + .to eq(1) + expect(body_as_json.first) + .to eq(account.domain) + end + end + end +end diff --git a/spec/services/activitypub/fetch_featured_collection_service_spec.rb b/spec/services/activitypub/fetch_featured_collection_service_spec.rb index a98108cea3..b9e95b825f 100644 --- a/spec/services/activitypub/fetch_featured_collection_service_spec.rb +++ b/spec/services/activitypub/fetch_featured_collection_service_spec.rb @@ -31,7 +31,7 @@ RSpec.describe ActivityPub::FetchFeaturedCollectionService, type: :service do } end - let(:status_json_pinned_unknown_unreachable) do + let(:status_json_pinned_unknown_reachable) do { '@context': 'https://www.w3.org/ns/activitystreams', type: 'Note', @@ -75,7 +75,7 @@ RSpec.describe ActivityPub::FetchFeaturedCollectionService, type: :service do stub_request(:get, 'https://example.com/account/pinned/known').to_return(status: 200, body: Oj.dump(status_json_pinned_known)) stub_request(:get, 'https://example.com/account/pinned/unknown-inlined').to_return(status: 200, body: Oj.dump(status_json_pinned_unknown_inlined)) stub_request(:get, 'https://example.com/account/pinned/unknown-unreachable').to_return(status: 404) - stub_request(:get, 'https://example.com/account/pinned/unknown-reachable').to_return(status: 200, body: Oj.dump(status_json_pinned_unknown_unreachable)) + stub_request(:get, 'https://example.com/account/pinned/unknown-reachable').to_return(status: 200, body: Oj.dump(status_json_pinned_unknown_reachable)) stub_request(:get, 'https://example.com/account/collections/featured').to_return(status: 200, body: Oj.dump(featured_with_null)) subject.call(actor, note: true, hashtag: false) @@ -115,6 +115,21 @@ RSpec.describe ActivityPub::FetchFeaturedCollectionService, type: :service do end it_behaves_like 'sets pinned posts' + + context 'when there is a single item, with the array compacted away' do + let(:items) { 'https://example.com/account/pinned/unknown-reachable' } + + before do + stub_request(:get, 'https://example.com/account/pinned/unknown-reachable').to_return(status: 200, body: Oj.dump(status_json_pinned_unknown_reachable)) + subject.call(actor, note: true, hashtag: false) + end + + it 'sets expected posts as pinned posts' do + expect(actor.pinned_statuses.pluck(:uri)).to contain_exactly( + 'https://example.com/account/pinned/unknown-reachable' + ) + end + end end context 'when the endpoint is a paginated Collection' do @@ -136,6 +151,21 @@ RSpec.describe ActivityPub::FetchFeaturedCollectionService, type: :service do end it_behaves_like 'sets pinned posts' + + context 'when there is a single item, with the array compacted away' do + let(:items) { 'https://example.com/account/pinned/unknown-reachable' } + + before do + stub_request(:get, 'https://example.com/account/pinned/unknown-reachable').to_return(status: 200, body: Oj.dump(status_json_pinned_unknown_reachable)) + subject.call(actor, note: true, hashtag: false) + end + + it 'sets expected posts as pinned posts' do + expect(actor.pinned_statuses.pluck(:uri)).to contain_exactly( + 
'https://example.com/account/pinned/unknown-reachable' + ) + end + end end end end diff --git a/spec/services/activitypub/fetch_replies_service_spec.rb b/spec/services/activitypub/fetch_replies_service_spec.rb index d7716dd4ef..a76b996c20 100644 --- a/spec/services/activitypub/fetch_replies_service_spec.rb +++ b/spec/services/activitypub/fetch_replies_service_spec.rb @@ -34,6 +34,18 @@ RSpec.describe ActivityPub::FetchRepliesService, type: :service do describe '#call' do context 'when the payload is a Collection with inlined replies' do + context 'when there is a single reply, with the array compacted away' do + let(:items) { 'http://example.com/self-reply-1' } + + it 'queues the expected worker' do + allow(FetchReplyWorker).to receive(:push_bulk) + + subject.call(status, payload) + + expect(FetchReplyWorker).to have_received(:push_bulk).with(['http://example.com/self-reply-1']) + end + end + context 'when passing the collection itself' do it 'spawns workers for up to 5 replies on the same server' do allow(FetchReplyWorker).to receive(:push_bulk) diff --git a/spec/services/purge_domain_service_spec.rb b/spec/services/purge_domain_service_spec.rb index e96618310b..6d8af14deb 100644 --- a/spec/services/purge_domain_service_spec.rb +++ b/spec/services/purge_domain_service_spec.rb @@ -5,25 +5,25 @@ require 'rails_helper' RSpec.describe PurgeDomainService, type: :service do subject { described_class.new } - let!(:old_account) { Fabricate(:account, domain: 'obsolete.org') } - let!(:old_status_plain) { Fabricate(:status, account: old_account) } - let!(:old_status_with_attachment) { Fabricate(:status, account: old_account) } - let!(:old_attachment) { Fabricate(:media_attachment, account: old_account, status: old_status_with_attachment, file: attachment_fixture('attachment.jpg')) } + let(:domain) { 'obsolete.org' } + let!(:account) { Fabricate(:account, domain: domain) } + let!(:status_plain) { Fabricate(:status, account: account) } + let!(:status_with_attachment) { Fabricate(:status, account: account) } + let!(:attachment) { Fabricate(:media_attachment, account: account, status: status_with_attachment, file: attachment_fixture('attachment.jpg')) } describe 'for a suspension' do - before do - subject.call('obsolete.org') + it 'refreshes instance view and removes associated records' do + expect { subject.call(domain) } + .to change { domain_instance_exists }.from(true).to(false) + + expect { account.reload }.to raise_exception ActiveRecord::RecordNotFound + expect { status_plain.reload }.to raise_exception ActiveRecord::RecordNotFound + expect { status_with_attachment.reload }.to raise_exception ActiveRecord::RecordNotFound + expect { attachment.reload }.to raise_exception ActiveRecord::RecordNotFound end - it 'removes the remote accounts\'s statuses and media attachments' do - expect { old_account.reload }.to raise_exception ActiveRecord::RecordNotFound - expect { old_status_plain.reload }.to raise_exception ActiveRecord::RecordNotFound - expect { old_status_with_attachment.reload }.to raise_exception ActiveRecord::RecordNotFound - expect { old_attachment.reload }.to raise_exception ActiveRecord::RecordNotFound - end - - it 'refreshes instances view' do - expect(Instance.where(domain: 'obsolete.org').exists?).to be false + def domain_instance_exists + Instance.exists?(domain: domain) end end end diff --git a/spec/services/unallow_domain_service_spec.rb b/spec/services/unallow_domain_service_spec.rb index 6ac6bc4016..383977d352 100644 --- a/spec/services/unallow_domain_service_spec.rb +++ 
b/spec/services/unallow_domain_service_spec.rb @@ -5,12 +5,13 @@ require 'rails_helper' RSpec.describe UnallowDomainService, type: :service do subject { described_class.new } - let!(:bad_account) { Fabricate(:account, username: 'badguy666', domain: 'evil.org') } + let(:bad_domain) { 'evil.org' } + let!(:bad_account) { Fabricate(:account, username: 'badguy666', domain: bad_domain) } let!(:bad_status_harassment) { Fabricate(:status, account: bad_account, text: 'You suck') } let!(:bad_status_mean) { Fabricate(:status, account: bad_account, text: 'Hahaha') } let!(:bad_attachment) { Fabricate(:media_attachment, account: bad_account, status: bad_status_mean, file: attachment_fixture('attachment.jpg')) } - let!(:already_banned_account) { Fabricate(:account, username: 'badguy', domain: 'evil.org', suspended: true, silenced: true) } - let!(:domain_allow) { Fabricate(:domain_allow, domain: 'evil.org') } + let!(:already_banned_account) { Fabricate(:account, username: 'badguy', domain: bad_domain, suspended: true, silenced: true) } + let!(:domain_allow) { Fabricate(:domain_allow, domain: bad_domain) } context 'with limited federation mode', :sidekiq_inline do before do @@ -18,23 +19,15 @@ RSpec.describe UnallowDomainService, type: :service do end describe '#call' do - before do - subject.call(domain_allow) - end + it 'makes the domain not allowed and removes accounts from that domain' do + expect { subject.call(domain_allow) } + .to change { bad_domain_allowed }.from(true).to(false) + .and change { bad_domain_account_exists }.from(true).to(false) - it 'removes the allowed domain' do - expect(DomainAllow.allowed?('evil.org')).to be false - end - - it 'removes remote accounts from that domain' do expect { already_banned_account.reload }.to raise_error(ActiveRecord::RecordNotFound) - expect(Account.where(domain: 'evil.org').exists?).to be false - end - - it 'removes the remote accounts\'s statuses and media attachments' do - expect { bad_status_harassment.reload }.to raise_exception ActiveRecord::RecordNotFound - expect { bad_status_mean.reload }.to raise_exception ActiveRecord::RecordNotFound - expect { bad_attachment.reload }.to raise_exception ActiveRecord::RecordNotFound + expect { bad_status_harassment.reload }.to raise_error(ActiveRecord::RecordNotFound) + expect { bad_status_mean.reload }.to raise_error(ActiveRecord::RecordNotFound) + expect { bad_attachment.reload }.to raise_error(ActiveRecord::RecordNotFound) end end end @@ -45,23 +38,23 @@ RSpec.describe UnallowDomainService, type: :service do end describe '#call' do - before do - subject.call(domain_allow) - end + it 'makes the domain not allowed but preserves accounts from the domain' do + expect { subject.call(domain_allow) } + .to change { bad_domain_allowed }.from(true).to(false) + .and not_change { bad_domain_account_exists }.from(true) - it 'removes the allowed domain' do - expect(DomainAllow.allowed?('evil.org')).to be false - end - - it 'does not remove accounts from that domain' do - expect(Account.where(domain: 'evil.org').exists?).to be true - end - - it 'removes the remote accounts\'s statuses and media attachments' do expect { bad_status_harassment.reload }.to_not raise_error expect { bad_status_mean.reload }.to_not raise_error expect { bad_attachment.reload }.to_not raise_error end end end + + def bad_domain_allowed + DomainAllow.allowed?(bad_domain) + end + + def bad_domain_account_exists + Account.exists?(domain: bad_domain) + end end diff --git a/streaming/.eslintrc.js b/streaming/.eslintrc.js index 5e2d233c68..188ebb512d 
100644 --- a/streaming/.eslintrc.js +++ b/streaming/.eslintrc.js @@ -15,7 +15,18 @@ module.exports = defineConfig({ ecmaVersion: 2021, }, rules: { + // In the streaming server we need to delete some variables to ensure + // garbage collection takes place on the values referenced by those objects; + // The alternative is to declare the variable as nullable, but then we need + // to assert it's in existence before every use, which becomes much harder + // to maintain. + 'no-delete-var': 'off', + + // The streaming server is written in commonjs, not ESM for now: 'import/no-commonjs': 'off', + + // This overrides the base configuration for this rule to pick up + // dependencies for the streaming server from the correct package.json file. 'import/no-extraneous-dependencies': [ 'error', { diff --git a/streaming/index.js b/streaming/index.js index e33328e38a..7c483f67a6 100644 --- a/streaming/index.js +++ b/streaming/index.js @@ -10,12 +10,11 @@ const dotenv = require('dotenv'); const express = require('express'); const Redis = require('ioredis'); const { JSDOM } = require('jsdom'); -const log = require('npmlog'); const pg = require('pg'); const dbUrlToConfig = require('pg-connection-string').parse; -const uuid = require('uuid'); const WebSocket = require('ws'); +const { logger, httpLogger, initializeLogLevel, attachWebsocketHttpLogger, createWebsocketLogger } = require('./logging'); const { setupMetrics } = require('./metrics'); const { isTruthy } = require("./utils"); @@ -28,15 +27,30 @@ dotenv.config({ path: path.resolve(__dirname, path.join('..', dotenvFile)) }); -log.level = process.env.LOG_LEVEL || 'verbose'; +initializeLogLevel(process.env, environment); + +/** + * Declares the result type for accountFromToken / accountFromRequest. + * + * Note: This is here because jsdoc doesn't like importing types that + * are nested in functions + * @typedef ResolvedAccount + * @property {string} accessTokenId + * @property {string[]} scopes + * @property {string} accountId + * @property {string[]} chosenLanguages + * @property {string} deviceId + */ /** * @param {Object.} config */ const createRedisClient = async (config) => { const { redisParams, redisUrl } = config; + // @ts-ignore const client = new Redis(redisUrl, redisParams); - client.on('error', (err) => log.error('Redis Client Error!', err)); + // @ts-ignore + client.on('error', (err) => logger.error({ err }, 'Redis Client Error!')); return client; }; @@ -61,12 +75,12 @@ const parseJSON = (json, req) => { */ if (req) { if (req.accountId) { - log.warn(req.requestId, `Error parsing message from user ${req.accountId}: ${err}`); + req.log.error({ err }, `Error parsing message from user ${req.accountId}`); } else { - log.silly(req.requestId, `Error parsing message from ${req.remoteAddress}: ${err}`); + req.log.error({ err }, `Error parsing message from ${req.remoteAddress}`); } } else { - log.warn(`Error parsing message from redis: ${err}`); + logger.error({ err }, `Error parsing message from redis`); } return null; } @@ -105,6 +119,7 @@ const pgConfigFromEnv = (env) => { baseConfig.password = env.DB_PASS; } } else { + // @ts-ignore baseConfig = pgConfigs[environment]; if (env.DB_SSLMODE) { @@ -149,6 +164,7 @@ const redisConfigFromEnv = (env) => { // redisParams.path takes precedence over host and port. if (env.REDIS_URL && env.REDIS_URL.startsWith('unix://')) { + // @ts-ignore redisParams.path = env.REDIS_URL.slice(7); } @@ -195,6 +211,7 @@ const startServer = async () => { app.set('trust proxy', process.env.TRUSTED_PROXY_IP ? 
process.env.TRUSTED_PROXY_IP.split(/(?:\s*,\s*|\s+)/) : 'loopback,uniquelocal'); + app.use(httpLogger); app.use(cors()); // Handle eventsource & other http requests: @@ -202,32 +219,37 @@ const startServer = async () => { // Handle upgrade requests: server.on('upgrade', async function handleUpgrade(request, socket, head) { + // Setup the HTTP logger, since websocket upgrades don't get the usual http + // logger. This decorates the `request` object. + attachWebsocketHttpLogger(request); + + request.log.info("HTTP Upgrade Requested"); + /** @param {Error} err */ const onSocketError = (err) => { - log.error(`Error with websocket upgrade: ${err}`); + request.log.error({ error: err }, err.message); }; socket.on('error', onSocketError); - // Authenticate: - try { - await accountFromRequest(request); - } catch (err) { - log.error(`Error authenticating request: ${err}`); + /** @type {ResolvedAccount} */ + let resolvedAccount; + try { + resolvedAccount = await accountFromRequest(request); + } catch (err) { // Unfortunately for using the on('upgrade') setup, we need to manually // write a HTTP Response to the Socket to close the connection upgrade // attempt, so the following code is to handle all of that. const statusCode = err.status ?? 401; - /** @type {Record} */ + /** @type {Record} */ const headers = { 'Connection': 'close', 'Content-Type': 'text/plain', 'Content-Length': 0, 'X-Request-Id': request.id, - // TODO: Send the error message via header so it can be debugged in - // developer tools + 'X-Error-Message': err.status ? err.toString() : 'An unexpected error occurred' }; // Ensure the socket is closed once we've finished writing to it: @@ -238,15 +260,28 @@ const startServer = async () => { // Write the HTTP response manually: socket.end(`HTTP/1.1 ${statusCode} ${http.STATUS_CODES[statusCode]}\r\n${Object.keys(headers).map((key) => `${key}: ${headers[key]}`).join('\r\n')}\r\n\r\n`); + // Finally, log the error: + request.log.error({ + err, + res: { + statusCode, + headers + } + }, err.toString()); + return; } + // Remove the error handler, wss.handleUpgrade has its own: + socket.removeListener('error', onSocketError); + wss.handleUpgrade(request, socket, head, function done(ws) { - // Remove the error handler: - socket.removeListener('error', onSocketError); + request.log.info("Authenticated request & upgraded to WebSocket connection"); + + const wsLogger = createWebsocketLogger(request, resolvedAccount); // Start the connection: - wss.emit('connection', ws, request); + wss.emit('connection', ws, request, wsLogger); }); }); @@ -273,9 +308,9 @@ const startServer = async () => { // When checking metrics in the browser, the favicon is requested this // prevents the request from falling through to the API Router, which would // error for this endpoint: - app.get('/favicon.ico', (req, res) => res.status(404).end()); + app.get('/favicon.ico', (_req, res) => res.status(404).end()); - app.get('/api/v1/streaming/health', (req, res) => { + app.get('/api/v1/streaming/health', (_req, res) => { res.writeHead(200, { 'Content-Type': 'text/plain' }); res.end('OK'); }); @@ -285,7 +320,7 @@ const startServer = async () => { res.set('Content-Type', metrics.register.contentType); res.end(await metrics.register.metrics()); } catch (ex) { - log.error(ex); + req.log.error(ex); res.status(500).end(); } }); @@ -319,7 +354,7 @@ const startServer = async () => { const callbacks = subs[channel]; - log.silly(`New message on channel ${redisPrefix}${channel}`); + logger.debug(`New message on channel 
${redisPrefix}${channel}`); if (!callbacks) { return; @@ -343,17 +378,16 @@ const startServer = async () => { * @param {SubscriptionListener} callback */ const subscribe = (channel, callback) => { - log.silly(`Adding listener for ${channel}`); + logger.debug(`Adding listener for ${channel}`); subs[channel] = subs[channel] || []; if (subs[channel].length === 0) { - log.verbose(`Subscribe ${channel}`); + logger.debug(`Subscribe ${channel}`); redisSubscribeClient.subscribe(channel, (err, count) => { if (err) { - log.error(`Error subscribing to ${channel}`); - } - else { + logger.error(`Error subscribing to ${channel}`); + } else if (typeof count === 'number') { redisSubscriptions.set(count); } }); @@ -367,7 +401,7 @@ const startServer = async () => { * @param {SubscriptionListener} callback */ const unsubscribe = (channel, callback) => { - log.silly(`Removing listener for ${channel}`); + logger.debug(`Removing listener for ${channel}`); if (!subs[channel]) { return; @@ -376,12 +410,11 @@ const startServer = async () => { subs[channel] = subs[channel].filter(item => item !== callback); if (subs[channel].length === 0) { - log.verbose(`Unsubscribe ${channel}`); + logger.debug(`Unsubscribe ${channel}`); redisSubscribeClient.unsubscribe(channel, (err, count) => { if (err) { - log.error(`Error unsubscribing to ${channel}`); - } - else { + logger.error(`Error unsubscribing to ${channel}`); + } else if (typeof count === 'number') { redisSubscriptions.set(count); } }); @@ -390,45 +423,13 @@ const startServer = async () => { }; /** - * @param {any} req - * @param {any} res - * @param {function(Error=): void} next - */ - const setRequestId = (req, res, next) => { - req.requestId = uuid.v4(); - res.header('X-Request-Id', req.requestId); - - next(); - }; - - /** - * @param {any} req - * @param {any} res - * @param {function(Error=): void} next - */ - const setRemoteAddress = (req, res, next) => { - req.remoteAddress = req.connection.remoteAddress; - - next(); - }; - - /** - * @param {any} req + * @param {http.IncomingMessage & ResolvedAccount} req * @param {string[]} necessaryScopes * @returns {boolean} */ const isInScope = (req, necessaryScopes) => req.scopes.some(scope => necessaryScopes.includes(scope)); - /** - * @typedef ResolvedAccount - * @property {string} accessTokenId - * @property {string[]} scopes - * @property {string} accountId - * @property {string[]} chosenLanguages - * @property {string} deviceId - */ - /** * @param {string} token * @param {any} req @@ -441,6 +442,7 @@ const startServer = async () => { return; } + // @ts-ignore client.query('SELECT oauth_access_tokens.id, oauth_access_tokens.resource_owner_id, users.account_id, users.chosen_languages, oauth_access_tokens.scopes, devices.device_id FROM oauth_access_tokens INNER JOIN users ON oauth_access_tokens.resource_owner_id = users.id LEFT OUTER JOIN devices ON oauth_access_tokens.id = devices.access_token_id WHERE oauth_access_tokens.token = $1 AND oauth_access_tokens.revoked_at IS NULL LIMIT 1', [token], (err, result) => { done(); @@ -451,6 +453,7 @@ const startServer = async () => { if (result.rows.length === 0) { err = new Error('Invalid access token'); + // @ts-ignore err.status = 401; reject(err); @@ -485,6 +488,7 @@ const startServer = async () => { if (!authorization && !accessToken) { const err = new Error('Missing access token'); + // @ts-ignore err.status = 401; reject(err); @@ -531,15 +535,16 @@ const startServer = async () => { }; /** - * @param {any} req + * @param {http.IncomingMessage & ResolvedAccount} req + * @param 
{import('pino').Logger} logger * @param {string|undefined} channelName * @returns {Promise.} */ - const checkScopes = (req, channelName) => new Promise((resolve, reject) => { - log.silly(req.requestId, `Checking OAuth scopes for ${channelName}`); + const checkScopes = (req, logger, channelName) => new Promise((resolve, reject) => { + logger.debug(`Checking OAuth scopes for ${channelName}`); // When accessing public channels, no scopes are needed - if (PUBLIC_CHANNELS.includes(channelName)) { + if (channelName && PUBLIC_CHANNELS.includes(channelName)) { resolve(); return; } @@ -566,6 +571,7 @@ const startServer = async () => { } const err = new Error('Access token does not cover required scopes'); + // @ts-ignore err.status = 401; reject(err); @@ -579,38 +585,40 @@ const startServer = async () => { /** * @param {any} req * @param {SystemMessageHandlers} eventHandlers - * @returns {function(object): void} + * @returns {SubscriptionListener} */ const createSystemMessageListener = (req, eventHandlers) => { return message => { + if (!message?.event) { + return; + } + const { event } = message; - log.silly(req.requestId, `System message for ${req.accountId}: ${event}`); + req.log.debug(`System message for ${req.accountId}: ${event}`); if (event === 'kill') { - log.verbose(req.requestId, `Closing connection for ${req.accountId} due to expired access token`); + req.log.debug(`Closing connection for ${req.accountId} due to expired access token`); eventHandlers.onKill(); } else if (event === 'filters_changed') { - log.verbose(req.requestId, `Invalidating filters cache for ${req.accountId}`); + req.log.debug(`Invalidating filters cache for ${req.accountId}`); req.cachedFilters = null; } }; }; /** - * @param {any} req - * @param {any} res + * @param {http.IncomingMessage & ResolvedAccount} req + * @param {http.OutgoingMessage} res */ const subscribeHttpToSystemChannel = (req, res) => { const accessTokenChannelId = `timeline:access_token:${req.accessTokenId}`; const systemChannelId = `timeline:system:${req.accountId}`; const listener = createSystemMessageListener(req, { - onKill() { res.end(); }, - }); res.on('close', () => { @@ -643,13 +651,14 @@ const startServer = async () => { // the connection, as there's nothing to stream back if (!channelName) { const err = new Error('Unknown channel requested'); + // @ts-ignore err.status = 400; next(err); return; } - accountFromRequest(req).then(() => checkScopes(req, channelName)).then(() => { + accountFromRequest(req).then(() => checkScopes(req, req.log, channelName)).then(() => { subscribeHttpToSystemChannel(req, res); }).then(() => { next(); @@ -665,22 +674,28 @@ const startServer = async () => { * @param {function(Error=): void} next */ const errorMiddleware = (err, req, res, next) => { - log.error(req.requestId, err.toString()); + req.log.error({ err }, err.toString()); if (res.headersSent) { next(err); return; } - res.writeHead(err.status || 500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: err.status ? err.toString() : 'An unexpected error occurred' })); + const hasStatusCode = Object.hasOwnProperty.call(err, 'status'); + // @ts-ignore + const statusCode = hasStatusCode ? err.status : 500; + const errorMessage = hasStatusCode ? 
err.toString() : 'An unexpected error occurred'; + + res.writeHead(statusCode, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: errorMessage })); }; /** - * @param {array} arr + * @param {any[]} arr * @param {number=} shift * @returns {string} */ + // @ts-ignore const placeholders = (arr, shift = 0) => arr.map((_, i) => `$${i + 1 + shift}`).join(', '); /** @@ -697,6 +712,7 @@ const startServer = async () => { return; } + // @ts-ignore client.query('SELECT id, account_id FROM lists WHERE id = $1 LIMIT 1', [listId], (err, result) => { done(); @@ -724,6 +740,7 @@ const startServer = async () => { return; } + // @ts-ignore client.query('SELECT id, account_id FROM antennas WHERE id = $1 LIMIT 1', [antennaId], (err, result) => { done(); @@ -738,34 +755,43 @@ const startServer = async () => { }); /** - * @param {string[]} ids - * @param {any} req + * @param {string[]} channelIds + * @param {http.IncomingMessage & ResolvedAccount} req + * @param {import('pino').Logger} log * @param {function(string, string): void} output * @param {undefined | function(string[], SubscriptionListener): void} attachCloseHandler * @param {'websocket' | 'eventsource'} destinationType * @param {boolean=} needsFiltering * @returns {SubscriptionListener} */ - const streamFrom = (ids, req, output, attachCloseHandler, destinationType, needsFiltering = false) => { - const accountId = req.accountId || req.remoteAddress; - - log.verbose(req.requestId, `Starting stream from ${ids.join(', ')} for ${accountId}`); + const streamFrom = (channelIds, req, log, output, attachCloseHandler, destinationType, needsFiltering = false) => { + log.info({ channelIds }, `Starting stream`); + /** + * @param {string} event + * @param {object|string} payload + */ const transmit = (event, payload) => { // TODO: Replace "string"-based delete payloads with object payloads: const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload; messagesSent.labels({ type: destinationType }).inc(1); - log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload}`); + log.debug({ event, payload }, `Transmitting ${event} to ${req.accountId}`); + output(event, encodedPayload); }; // The listener used to process each message off the redis subscription, // message here is an object with an `event` and `payload` property. Some // events also include a queued_at value, but this is being removed shortly. 
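For context, a brief sketch (not part of the patch) of the kind of message such a listener receives off the Redis subscription; the channel contents below are illustrative only, not a real payload.

// A message as published on the Redis channel and handed to the listener:
const exampleMessage = {
  event: 'update',
  payload: { id: '1', language: 'en', account: { id: '1', acct: 'alice' }, mentions: [] },
  queued_at: 1700000000000, // still present today, slated for removal
};
// Anything without both `event` and `payload` is ignored by the guard below.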
+ /** @type {SubscriptionListener} */ const listener = message => { + if (!message?.event || !message?.payload) { + return; + } + const { event, payload } = message; // reference_texts property is not working if ProcessReferencesWorker is @@ -795,7 +821,7 @@ const startServer = async () => { // Filter based on language: if (Array.isArray(req.chosenLanguages) && payload.language !== null && req.chosenLanguages.indexOf(payload.language) === -1) { - log.silly(req.requestId, `Message ${payload.id} filtered by language (${payload.language})`); + log.debug(`Message ${payload.id} filtered by language (${payload.language})`); return; } @@ -806,6 +832,7 @@ const startServer = async () => { } // Filter based on domain blocks, blocks, mutes, or custom filters: + // @ts-ignore const targetAccountIds = [payload.account.id].concat(payload.mentions.map(item => item.id)); const accountDomain = payload.account.acct.split('@')[1]; @@ -817,6 +844,7 @@ const startServer = async () => { } const queries = [ + // @ts-ignore client.query(`SELECT 1 FROM blocks WHERE (account_id = $1 AND target_account_id IN (${placeholders(targetAccountIds, 2)})) @@ -829,13 +857,17 @@ const startServer = async () => { ]; if (accountDomain) { + // @ts-ignore queries.push(client.query('SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2', [req.accountId, accountDomain])); } + // @ts-ignore if (!payload.filtered && !req.cachedFilters) { + // @ts-ignore queries.push(client.query('SELECT filter.id AS id, filter.phrase AS title, filter.context AS context, filter.expires_at AS expires_at, filter.action AS filter_action, filter.with_quote AS with_quote, keyword.keyword AS keyword, keyword.whole_word AS whole_word, filter.exclude_follows AS exclude_follows, filter.exclude_localusers AS exclude_localusers FROM custom_filter_keywords keyword JOIN custom_filters filter ON keyword.custom_filter_id = filter.id WHERE filter.account_id = $1 AND (filter.expires_at IS NULL OR filter.expires_at > NOW())', [req.accountId])); } if (!payload.filtered) { + // @ts-ignore queries.push(client.query(`SELECT 1 FROM follows WHERE (account_id = $1 AND target_account_id = $2)`, [req.accountId, payload.account.id])); @@ -862,9 +894,11 @@ const startServer = async () => { // Handling for constructing the custom filters and caching them on the request // TODO: Move this logic out of the message handling lifecycle + // @ts-ignore if (!req.cachedFilters) { const filterRows = values[accountDomain ? 
2 : 1].rows; + // @ts-ignore req.cachedFilters = filterRows.reduce((cache, filter) => { if (cache[filter.id]) { cache[filter.id].keywords.push([filter.keyword, filter.whole_word]); @@ -898,7 +932,9 @@ // needs to be done in a separate loop as the database returns one // filterRow per keyword, so we need all the keywords before // constructing the regular expression + // @ts-ignore Object.keys(req.cachedFilters).forEach((key) => { + // @ts-ignore req.cachedFilters[key].regexp = new RegExp(req.cachedFilters[key].keywords.map(([keyword, whole_word]) => { let expr = keyword.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); @@ -919,13 +955,16 @@ // Apply cachedFilters against the payload, constructing a // `filter_results` array of FilterResult entities + // @ts-ignore if (req.cachedFilters) { const status = payload; // TODO: Calculate searchableContent in Ruby on Rails: + // @ts-ignore const searchableContent = ([status.spoiler_text || '', status.content, ...(reference_texts || [])].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/<br\s*\/?>/g, '\n').replace(/<\/p><p>
/g, '\n\n'); const searchableTextContent = JSDOM.fragment(searchableContent).textContent; const now = new Date(); + // @ts-ignore const filter_results = Object.values(req.cachedFilters).reduce((results, cachedFilter) => { // Check the filter hasn't expired before applying: if (cachedFilter.expires_at !== null && cachedFilter.expires_at < now) { @@ -981,12 +1020,12 @@ const startServer = async () => { }); }; - ids.forEach(id => { + channelIds.forEach(id => { subscribe(`${redisPrefix}${id}`, listener); }); if (typeof attachCloseHandler === 'function') { - attachCloseHandler(ids.map(id => `${redisPrefix}${id}`), listener); + attachCloseHandler(channelIds.map(id => `${redisPrefix}${id}`), listener); } return listener; @@ -998,8 +1037,6 @@ const startServer = async () => { * @returns {function(string, string): void} */ const streamToHttp = (req, res) => { - const accountId = req.accountId || req.remoteAddress; - const channelName = channelNameFromPath(req); connectedClients.labels({ type: 'eventsource' }).inc(); @@ -1018,7 +1055,8 @@ const startServer = async () => { const heartbeat = setInterval(() => res.write(':thump\n'), 15000); req.on('close', () => { - log.verbose(req.requestId, `Ending stream for ${accountId}`); + req.log.info({ accountId: req.accountId }, `Ending stream`); + // We decrement these counters here instead of in streamHttpEnd as in that // method we don't have knowledge of the channel names connectedClients.labels({ type: 'eventsource' }).dec(); @@ -1062,15 +1100,15 @@ const startServer = async () => { */ const streamToWs = (req, ws, streamName) => (event, payload) => { if (ws.readyState !== ws.OPEN) { - log.error(req.requestId, 'Tried writing to closed socket'); + req.log.error('Tried writing to closed socket'); return; } const message = JSON.stringify({ stream: streamName, event, payload }); - ws.send(message, (/** @type {Error} */ err) => { + ws.send(message, (/** @type {Error|undefined} */ err) => { if (err) { - log.error(req.requestId, `Failed to send to websocket: ${err}`); + req.log.error({err}, `Failed to send to websocket`); } }); }; @@ -1087,20 +1125,19 @@ const startServer = async () => { app.use(api); - api.use(setRequestId); - api.use(setRemoteAddress); - api.use(authenticationMiddleware); api.use(errorMiddleware); api.get('/api/v1/streaming/*', (req, res) => { + // @ts-ignore channelNameToIds(req, channelNameFromPath(req), req.query).then(({ channelIds, options }) => { const onSend = streamToHttp(req, res); const onEnd = streamHttpEnd(req, subscriptionHeartbeat(channelIds)); - streamFrom(channelIds, req, onSend, onEnd, 'eventsource', options.needsFiltering); + // @ts-ignore + streamFrom(channelIds, req, req.log, onSend, onEnd, 'eventsource', options.needsFiltering); }).catch(err => { - log.verbose(req.requestId, 'Subscription error:', err.toString()); + res.log.info({ err }, 'Subscription error:', err.toString()); httpNotFound(res); }); }); @@ -1109,6 +1146,7 @@ const startServer = async () => { * @typedef StreamParams * @property {string} [tag] * @property {string} [list] + * @property {string} [antenna] * @property {string} [only_media] */ @@ -1252,6 +1290,7 @@ const startServer = async () => { break; case 'list': + // @ts-ignore authorizeListAccess(params.list, req).then(() => { resolve({ channelIds: [`timeline:list:${params.list}`], @@ -1263,6 +1302,7 @@ const startServer = async () => { break; case 'antenna': + // @ts-ignore authorizeAntennaAccess(params.antenna, req).then(() => { resolve({ channelIds: [`timeline:antenna:${params.antenna}`], @@ -1284,11 
+1324,11 @@ const startServer = async () => { * @returns {string[]} */ const streamNameFromChannelName = (channelName, params) => { - if (channelName === 'list') { + if (channelName === 'list' && params.list) { return [channelName, params.list]; - } else if (channelName === 'antenna') { + } else if (channelName === 'antenna' && params.antenna) { return [channelName, params.antenna]; - } else if (['hashtag', 'hashtag:local'].includes(channelName)) { + } else if (['hashtag', 'hashtag:local'].includes(channelName) && params.tag) { return [channelName, params.tag]; } else { return [channelName]; @@ -1297,8 +1337,9 @@ const startServer = async () => { /** * @typedef WebSocketSession - * @property {WebSocket} websocket - * @property {http.IncomingMessage} request + * @property {WebSocket & { isAlive: boolean}} websocket + * @property {http.IncomingMessage & ResolvedAccount} request + * @property {import('pino').Logger} logger * @property {Object.} subscriptions */ @@ -1308,8 +1349,8 @@ const startServer = async () => { * @param {StreamParams} params * @returns {void} */ - const subscribeWebsocketToChannel = ({ socket, request, subscriptions }, channelName, params) => { - checkScopes(request, channelName).then(() => channelNameToIds(request, channelName, params)).then(({ + const subscribeWebsocketToChannel = ({ websocket, request, logger, subscriptions }, channelName, params) => { + checkScopes(request, logger, channelName).then(() => channelNameToIds(request, channelName, params)).then(({ channelIds, options, }) => { @@ -1317,9 +1358,9 @@ const startServer = async () => { return; } - const onSend = streamToWs(request, socket, streamNameFromChannelName(channelName, params)); + const onSend = streamToWs(request, websocket, streamNameFromChannelName(channelName, params)); const stopHeartbeat = subscriptionHeartbeat(channelIds); - const listener = streamFrom(channelIds, request, onSend, undefined, 'websocket', options.needsFiltering); + const listener = streamFrom(channelIds, request, logger, onSend, undefined, 'websocket', options.needsFiltering); connectedChannels.labels({ type: 'websocket', channel: channelName }).inc(); @@ -1329,14 +1370,17 @@ const startServer = async () => { stopHeartbeat, }; }).catch(err => { - log.verbose(request.requestId, 'Subscription error:', err.toString()); - socket.send(JSON.stringify({ error: err.toString() })); + logger.error({ err }, 'Subscription error'); + websocket.send(JSON.stringify({ error: err.toString() })); }); }; - - const removeSubscription = (subscriptions, channelIds, request) => { - log.verbose(request.requestId, `Ending stream from ${channelIds.join(', ')} for ${request.accountId}`); + /** + * @param {WebSocketSession} session + * @param {string[]} channelIds + */ + const removeSubscription = ({ request, logger, subscriptions }, channelIds) => { + logger.info({ channelIds, accountId: request.accountId }, `Ending stream`); const subscription = subscriptions[channelIds.join(';')]; @@ -1360,16 +1404,17 @@ const startServer = async () => { * @param {StreamParams} params * @returns {void} */ - const unsubscribeWebsocketFromChannel = ({ socket, request, subscriptions }, channelName, params) => { + const unsubscribeWebsocketFromChannel = (session, channelName, params) => { + const { websocket, request, logger } = session; + channelNameToIds(request, channelName, params).then(({ channelIds }) => { - removeSubscription(subscriptions, channelIds, request); + removeSubscription(session, channelIds); }).catch(err => { - log.verbose(request.requestId, 
'Unsubscribe error:', err); + logger.error({err}, 'Unsubscribe error'); // If we have a socket that is alive and open still, send the error back to the client: - // FIXME: In other parts of the code ws === socket - if (socket.isAlive && socket.readyState === socket.OPEN) { - socket.send(JSON.stringify({ error: "Error unsubscribing from channel" })); + if (websocket.isAlive && websocket.readyState === websocket.OPEN) { + websocket.send(JSON.stringify({ error: "Error unsubscribing from channel" })); } }); }; @@ -1377,16 +1422,14 @@ const startServer = async () => { /** * @param {WebSocketSession} session */ - const subscribeWebsocketToSystemChannel = ({ socket, request, subscriptions }) => { + const subscribeWebsocketToSystemChannel = ({ websocket, request, subscriptions }) => { const accessTokenChannelId = `timeline:access_token:${request.accessTokenId}`; const systemChannelId = `timeline:system:${request.accountId}`; const listener = createSystemMessageListener(request, { - onKill() { - socket.close(); + websocket.close(); }, - }); subscribe(`${redisPrefix}${accessTokenChannelId}`, listener); @@ -1423,18 +1466,15 @@ const startServer = async () => { /** * @param {WebSocket & { isAlive: boolean }} ws - * @param {http.IncomingMessage} req + * @param {http.IncomingMessage & ResolvedAccount} req + * @param {import('pino').Logger} log */ - function onConnection(ws, req) { + function onConnection(ws, req, log) { // Note: url.parse could throw, which would terminate the connection, so we // increment the connected clients metric straight away when we establish // the connection, without waiting: connectedClients.labels({ type: 'websocket' }).inc(); - // Setup request properties: - req.requestId = uuid.v4(); - req.remoteAddress = ws._socket.remoteAddress; - // Setup connection keep-alive state: ws.isAlive = true; ws.on('pong', () => { @@ -1445,8 +1485,9 @@ const startServer = async () => { * @type {WebSocketSession} */ const session = { - socket: ws, + websocket: ws, request: req, + logger: log, subscriptions: {}, }; @@ -1454,27 +1495,30 @@ const startServer = async () => { const subscriptions = Object.keys(session.subscriptions); subscriptions.forEach(channelIds => { - removeSubscription(session.subscriptions, channelIds.split(';'), req); + removeSubscription(session, channelIds.split(';')); }); // Decrement the metrics for connected clients: connectedClients.labels({ type: 'websocket' }).dec(); - // ensure garbage collection: - session.socket = null; - session.request = null; - session.subscriptions = {}; + // We need to delete the session object as to ensure it correctly gets + // garbage collected, without doing this we could accidentally hold on to + // references to the websocket, the request, and the logger, causing + // memory leaks. + // + // @ts-ignore + delete session; }); // Note: immediately after the `error` event is emitted, the `close` event // is emitted. As such, all we need to do is log the error here. 
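As an aside, a minimal sketch of the error/close ordering that note relies on, assuming a ws server and the pino logger from streaming/logging.js; none of this is part of the patch, and the teardown shown is a placeholder.

const { WebSocketServer } = require('ws');
const { logger } = require('./logging');

const wss = new WebSocketServer({ port: 8080 });

wss.on('connection', (ws) => {
  ws.on('error', (err) => {
    // `close` is emitted immediately afterwards, so logging is all that is needed here.
    logger.error(err);
  });

  ws.on('close', () => {
    // All teardown (removing subscriptions, decrementing metrics) belongs here.
    logger.info('connection closed');
  });
});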
- ws.on('error', (err) => { - log.error('websocket', err.toString()); + ws.on('error', (/** @type {Error} */ err) => { + log.error(err); }); ws.on('message', (data, isBinary) => { if (isBinary) { - log.warn('websocket', 'Received binary data, closing connection'); + log.warn('Received binary data, closing connection'); ws.close(1003, 'The mastodon streaming server does not support binary messages'); return; } @@ -1509,18 +1553,20 @@ const startServer = async () => { setInterval(() => { wss.clients.forEach(ws => { + // @ts-ignore if (ws.isAlive === false) { ws.terminate(); return; } + // @ts-ignore ws.isAlive = false; ws.ping('', false); }); }, 30000); attachServerWithConfig(server, address => { - log.warn(`Streaming API now listening on ${address}`); + logger.info(`Streaming API now listening on ${address}`); }); const onExit = () => { @@ -1528,8 +1574,10 @@ const startServer = async () => { process.exit(0); }; + /** @param {Error} err */ const onError = (err) => { - log.error(err); + logger.error(err); + server.close(); process.exit(0); }; @@ -1553,7 +1601,7 @@ const attachServerWithConfig = (server, onSuccess) => { } }); } else { - server.listen(+process.env.PORT || 4000, process.env.BIND || '127.0.0.1', () => { + server.listen(+(process.env.PORT || 4000), process.env.BIND || '127.0.0.1', () => { if (onSuccess) { onSuccess(`${server.address().address}:${server.address().port}`); } diff --git a/streaming/logging.js b/streaming/logging.js new file mode 100644 index 0000000000..64ee474875 --- /dev/null +++ b/streaming/logging.js @@ -0,0 +1,119 @@ +const { pino } = require('pino'); +const { pinoHttp, stdSerializers: pinoHttpSerializers } = require('pino-http'); +const uuid = require('uuid'); + +/** + * Generates the Request ID for logging and setting on responses + * @param {http.IncomingMessage} req + * @param {http.ServerResponse} [res] + * @returns {import("pino-http").ReqId} + */ +function generateRequestId(req, res) { + if (req.id) { + return req.id; + } + + req.id = uuid.v4(); + + // Allow for usage with WebSockets: + if (res) { + res.setHeader('X-Request-Id', req.id); + } + + return req.id; +} + +/** + * Request log sanitizer to prevent logging access tokens in URLs + * @param {http.IncomingMessage} req + */ +function sanitizeRequestLog(req) { + const log = pinoHttpSerializers.req(req); + if (typeof log.url === 'string' && log.url.includes('access_token')) { + // Doorkeeper uses SecureRandom.urlsafe_base64 per RFC 6749 / RFC 6750 + log.url = log.url.replace(/(access_token)=([a-zA-Z0-9\-_]+)/gi, '$1=[Redacted]'); + } + return log; +} + +const logger = pino({ + name: "streaming", + // Reformat the log level to a string: + formatters: { + level: (label) => { + return { + level: label + }; + }, + }, + redact: { + paths: [ + 'req.headers["sec-websocket-key"]', + // Note: we currently pass the AccessToken via the websocket subprotocol + // field, an anti-pattern, but this ensures it doesn't end up in logs. 
+ 'req.headers["sec-websocket-protocol"]', + 'req.headers.authorization', + 'req.headers.cookie', + 'req.query.access_token' + ] + } +}); + +const httpLogger = pinoHttp({ + logger, + genReqId: generateRequestId, + serializers: { + req: sanitizeRequestLog + } +}); + +/** + * Attaches a logger to the request object received by http upgrade handlers + * @param {http.IncomingMessage} request + */ +function attachWebsocketHttpLogger(request) { + generateRequestId(request); + + request.log = logger.child({ + req: sanitizeRequestLog(request), + }); +} + +/** + * Creates a logger instance for the Websocket connection to use. + * @param {http.IncomingMessage} request + * @param {import('./index.js').ResolvedAccount} resolvedAccount + */ +function createWebsocketLogger(request, resolvedAccount) { + // ensure the request.id is always present. + generateRequestId(request); + + return logger.child({ + req: { + id: request.id + }, + account: { + id: resolvedAccount.accountId ?? null + } + }); +} + +exports.logger = logger; +exports.httpLogger = httpLogger; +exports.attachWebsocketHttpLogger = attachWebsocketHttpLogger; +exports.createWebsocketLogger = createWebsocketLogger; + +/** + * Initializes the log level based on the environment + * @param {Object} env + * @param {string} environment + */ +exports.initializeLogLevel = function initializeLogLevel(env, environment) { + if (env.LOG_LEVEL && Object.keys(logger.levels.values).includes(env.LOG_LEVEL)) { + logger.level = env.LOG_LEVEL; + } else if (environment === 'development') { + logger.level = 'debug'; + } else { + logger.level = 'info'; + } +}; diff --git a/streaming/package.json b/streaming/package.json index 149055ca1b..52a9979702 100644 --- a/streaming/package.json +++ b/streaming/package.json @@ -21,9 +21,10 @@ "express": "^4.18.2", "ioredis": "^5.3.2", "jsdom": "^23.0.0", - "npmlog": "^7.0.1", "pg": "^8.5.0", "pg-connection-string": "^2.6.0", + "pino": "^8.17.2", + "pino-http": "^9.0.0", "prom-client": "^15.0.0", "uuid": "^9.0.0", "ws": "^8.12.1" @@ -31,11 +32,11 @@ "devDependencies": { "@types/cors": "^2.8.16", "@types/express": "^4.17.17", - "@types/npmlog": "^7.0.0", "@types/pg": "^8.6.6", "@types/uuid": "^9.0.0", "@types/ws": "^8.5.9", "eslint-define-config": "^2.0.0", + "pino-pretty": "^10.3.1", "typescript": "^5.0.4" }, "optionalDependencies": { diff --git a/yarn.lock b/yarn.lock index f09a5aac13..4c9a4c53f3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2545,7 +2545,6 @@ __metadata: dependencies: "@types/cors": "npm:^2.8.16" "@types/express": "npm:^4.17.17" - "@types/npmlog": "npm:^7.0.0" "@types/pg": "npm:^8.6.6" "@types/uuid": "npm:^9.0.0" "@types/ws": "npm:^8.5.9" @@ -2556,9 +2555,11 @@ __metadata: express: "npm:^4.18.2" ioredis: "npm:^5.3.2" jsdom: "npm:^23.0.0" - npmlog: "npm:^7.0.1" pg: "npm:^8.5.0" pg-connection-string: "npm:^2.6.0" + pino: "npm:^8.17.2" + pino-http: "npm:^9.0.0" + pino-pretty: "npm:^10.3.1" prom-client: "npm:^15.0.0" typescript: "npm:^5.0.4" utf-8-validate: "npm:^6.0.3" @@ -3347,15 +3348,6 @@ __metadata: languageName: node linkType: hard -"@types/npmlog@npm:^7.0.0": - version: 7.0.0 - resolution: "@types/npmlog@npm:7.0.0" - dependencies: - "@types/node": "npm:*" - checksum: e94cb1d7dc6b1251d58d0a3cbf0c5b9e9b7c7649774cf816b9277fc10e1a09e65f2854357c4972d04d477f8beca3c8accb5e8546d594776e59e35ddfee79aff2 - languageName: node - linkType: hard - "@types/object-assign@npm:^4.0.30": version: 4.0.33 resolution: "@types/object-assign@npm:4.0.33" @@ -4333,13 +4325,6 @@ __metadata: languageName: node linkType: hard 
-"aproba@npm:^1.0.3 || ^2.0.0": - version: 2.0.0 - resolution: "aproba@npm:2.0.0" - checksum: d06e26384a8f6245d8c8896e138c0388824e259a329e0c9f196b4fa533c82502a6fd449586e3604950a0c42921832a458bb3aa0aa9f0ba449cfd4f50fd0d09b5 - languageName: node - linkType: hard - "are-docs-informative@npm:^0.0.2": version: 0.0.2 resolution: "are-docs-informative@npm:0.0.2" @@ -4347,16 +4332,6 @@ __metadata: languageName: node linkType: hard -"are-we-there-yet@npm:^4.0.0": - version: 4.0.0 - resolution: "are-we-there-yet@npm:4.0.0" - dependencies: - delegates: "npm:^1.0.0" - readable-stream: "npm:^4.1.0" - checksum: 760008e32948e9f738c5a288792d187e235fee0f170e042850bc7ff242f2a499f3f2874d6dd43ac06f5d9f5306137bc51bbdd4ae0bb11379c58b01678e0f684d - languageName: node - linkType: hard - "argparse@npm:^1.0.7": version: 1.0.10 resolution: "argparse@npm:1.0.10" @@ -4622,11 +4597,11 @@ __metadata: linkType: hard "async-mutex@npm:^0.4.0": - version: 0.4.0 - resolution: "async-mutex@npm:0.4.0" + version: 0.4.1 + resolution: "async-mutex@npm:0.4.1" dependencies: tslib: "npm:^2.4.0" - checksum: 6541695f80c1d6c5acbf3f7f04e8ff0733b3e029312c48d77bb95243fbe21fc5319f45ac3d72ce08551e6df83dc32440285ce9a3ac17bfc5d385ff0cc8ccd62a + checksum: 3c412736c0bc4a9a2cfd948276a8caab8686aa615866a5bd20986e616f8945320acb310058a17afa1b31b8de6f634a78b7ec2217a33d7559b38f68bb85a95854 languageName: node linkType: hard @@ -4678,13 +4653,20 @@ __metadata: languageName: node linkType: hard +"atomic-sleep@npm:^1.0.0": + version: 1.0.0 + resolution: "atomic-sleep@npm:1.0.0" + checksum: e329a6665512736a9bbb073e1761b4ec102f7926cce35037753146a9db9c8104f5044c1662e4a863576ce544fb8be27cd2be6bc8c1a40147d03f31eb1cfb6e8a + languageName: node + linkType: hard + "autoprefixer@npm:^10.4.14": - version: 10.4.16 - resolution: "autoprefixer@npm:10.4.16" + version: 10.4.17 + resolution: "autoprefixer@npm:10.4.17" dependencies: - browserslist: "npm:^4.21.10" - caniuse-lite: "npm:^1.0.30001538" - fraction.js: "npm:^4.3.6" + browserslist: "npm:^4.22.2" + caniuse-lite: "npm:^1.0.30001578" + fraction.js: "npm:^4.3.7" normalize-range: "npm:^0.1.2" picocolors: "npm:^1.0.0" postcss-value-parser: "npm:^4.2.0" @@ -4692,7 +4674,7 @@ __metadata: postcss: ^8.1.0 bin: autoprefixer: bin/autoprefixer - checksum: e00256e754d481a026d928bca729b25954074dd142dbec022f0a7db0d3bbc0dc2e2dc7542e94fec22eff81e21fe140e6856448e2d9a002660cb1e2ad434daee0 + checksum: 1d21cc8edb7bf993682094ceed03a32c18f5293f071182a64c2c6defb44bbe91d576ad775d2347469a81997b80cea0bbc4ad3eeb5b12710f9feacf2e6c04bb51 languageName: node linkType: hard @@ -5230,7 +5212,7 @@ __metadata: languageName: node linkType: hard -"browserslist@npm:^4.0.0, browserslist@npm:^4.21.10, browserslist@npm:^4.22.2": +"browserslist@npm:^4.0.0, browserslist@npm:^4.22.2": version: 4.22.2 resolution: "browserslist@npm:4.22.2" dependencies: @@ -5456,10 +5438,10 @@ __metadata: languageName: node linkType: hard -"caniuse-lite@npm:^1.0.0, caniuse-lite@npm:^1.0.30001538, caniuse-lite@npm:^1.0.30001565": - version: 1.0.30001568 - resolution: "caniuse-lite@npm:1.0.30001568" - checksum: 13f01e5a2481134bd61cf565ce9fecbd8e107902927a0dcf534230a92191a81f1715792170f5f39719c767c3a96aa6df9917a8d5601f15bbd5e4041a8cfecc99 +"caniuse-lite@npm:^1.0.0, caniuse-lite@npm:^1.0.30001565, caniuse-lite@npm:^1.0.30001578": + version: 1.0.30001578 + resolution: "caniuse-lite@npm:1.0.30001578" + checksum: c3bd9c08a945cee4f0cc284a217ebe9c2613e04d5aef4b48f1871a779b1875c34286469eb8d7d94bd028b5a354613e676ad503b6bf8db20a2f154574bd5fde48 languageName: node linkType: hard @@ 
-5763,15 +5745,6 @@ __metadata: languageName: node linkType: hard -"color-support@npm:^1.1.3": - version: 1.1.3 - resolution: "color-support@npm:1.1.3" - bin: - color-support: bin.js - checksum: 8ffeaa270a784dc382f62d9be0a98581db43e11eee301af14734a6d089bd456478b1a8b3e7db7ca7dc5b18a75f828f775c44074020b51c05fc00e6d0992b1cc6 - languageName: node - linkType: hard - "colord@npm:^2.9.1, colord@npm:^2.9.3": version: 2.9.3 resolution: "colord@npm:2.9.3" @@ -5779,7 +5752,7 @@ __metadata: languageName: node linkType: hard -"colorette@npm:^2.0.20": +"colorette@npm:^2.0.20, colorette@npm:^2.0.7": version: 2.0.20 resolution: "colorette@npm:2.0.20" checksum: e94116ff33b0ff56f3b83b9ace895e5bf87c2a7a47b3401b8c3f3226e050d5ef76cf4072fb3325f9dc24d1698f9b730baf4e05eeaf861d74a1883073f4c98a40 @@ -5911,13 +5884,6 @@ __metadata: languageName: node linkType: hard -"console-control-strings@npm:^1.1.0": - version: 1.1.0 - resolution: "console-control-strings@npm:1.1.0" - checksum: 7ab51d30b52d461412cd467721bb82afe695da78fff8f29fe6f6b9cbaac9a2328e27a22a966014df9532100f6dd85370460be8130b9c677891ba36d96a343f50 - languageName: node - linkType: hard - "constants-browserify@npm:^1.0.0": version: 1.0.0 resolution: "constants-browserify@npm:1.0.0" @@ -6454,6 +6420,13 @@ __metadata: languageName: node linkType: hard +"dateformat@npm:^4.6.3": + version: 4.6.3 + resolution: "dateformat@npm:4.6.3" + checksum: e2023b905e8cfe2eb8444fb558562b524807a51cdfe712570f360f873271600b5c94aebffaf11efb285e2c072264a7cf243eadb68f3eba0f8cc85fb86cd25df6 + languageName: node + linkType: hard + "debounce@npm:^1.2.1": version: 1.2.1 resolution: "debounce@npm:1.2.1" @@ -6689,13 +6662,6 @@ __metadata: languageName: node linkType: hard -"delegates@npm:^1.0.0": - version: 1.0.0 - resolution: "delegates@npm:1.0.0" - checksum: ba05874b91148e1db4bf254750c042bf2215febd23a6d3cda2e64896aef79745fbd4b9996488bd3cafb39ce19dbce0fd6e3b6665275638befffe1c9b312b91b5 - languageName: node - linkType: hard - "denque@npm:^2.1.0": version: 2.1.0 resolution: "denque@npm:2.1.0" @@ -7961,6 +7927,13 @@ __metadata: languageName: node linkType: hard +"fast-copy@npm:^3.0.0": + version: 3.0.1 + resolution: "fast-copy@npm:3.0.1" + checksum: a8310dbcc4c94ed001dc3e0bbc3c3f0491bb04e6c17163abe441a54997ba06cdf1eb532c2f05e54777c6f072c84548c23ef0ecd54665cd611be1d42f37eca258 + languageName: node + linkType: hard + "fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": version: 3.1.3 resolution: "fast-deep-equal@npm:3.1.3" @@ -8002,6 +7975,20 @@ __metadata: languageName: node linkType: hard +"fast-redact@npm:^3.1.1": + version: 3.3.0 + resolution: "fast-redact@npm:3.3.0" + checksum: d81562510681e9ba6404ee5d3838ff5257a44d2f80937f5024c099049ff805437d0fae0124458a7e87535cc9dcf4de305bb075cab8f08d6c720bbc3447861b4e + languageName: node + linkType: hard + +"fast-safe-stringify@npm:^2.1.1": + version: 2.1.1 + resolution: "fast-safe-stringify@npm:2.1.1" + checksum: d90ec1c963394919828872f21edaa3ad6f1dddd288d2bd4e977027afff09f5db40f94e39536d4646f7e01761d704d72d51dce5af1b93717f3489ef808f5f4e4d + languageName: node + linkType: hard + "fastest-levenshtein@npm:^1.0.16": version: 1.0.16 resolution: "fastest-levenshtein@npm:1.0.16" @@ -8283,10 +8270,10 @@ __metadata: languageName: node linkType: hard -"fraction.js@npm:^4.3.6": - version: 4.3.6 - resolution: "fraction.js@npm:4.3.6" - checksum: d224bf62e350c4dbe66c6ac5ad9c4ec6d3c8e64c13323686dbebe7c8cc118491c297dca4961d3c93f847670794cb05e6d8b706f0e870846ab66a9c4491d0e914 +"fraction.js@npm:^4.3.7": + version: 4.3.7 + resolution: "fraction.js@npm:4.3.7" + 
checksum: df291391beea9ab4c263487ffd9d17fed162dbb736982dee1379b2a8cc94e4e24e46ed508c6d278aded9080ba51872f1bc5f3a5fd8d7c74e5f105b508ac28711 languageName: node linkType: hard @@ -8416,22 +8403,6 @@ __metadata: languageName: node linkType: hard -"gauge@npm:^5.0.0": - version: 5.0.1 - resolution: "gauge@npm:5.0.1" - dependencies: - aproba: "npm:^1.0.3 || ^2.0.0" - color-support: "npm:^1.1.3" - console-control-strings: "npm:^1.1.0" - has-unicode: "npm:^2.0.1" - signal-exit: "npm:^4.0.1" - string-width: "npm:^4.2.3" - strip-ansi: "npm:^6.0.1" - wide-align: "npm:^1.1.5" - checksum: 845f9a2534356cd0e9c1ae590ed471bbe8d74c318915b92a34e8813b8d3441ca8e0eb0fa87a48081e70b63b84d398c5e66a13b8e8040181c10b9d77e9fe3287f - languageName: node - linkType: hard - "gensync@npm:^1.0.0-beta.2": version: 1.0.0-beta.2 resolution: "gensync@npm:1.0.0-beta.2" @@ -8780,13 +8751,6 @@ __metadata: languageName: node linkType: hard -"has-unicode@npm:^2.0.1": - version: 2.0.1 - resolution: "has-unicode@npm:2.0.1" - checksum: ebdb2f4895c26bb08a8a100b62d362e49b2190bcfd84b76bc4be1a3bd4d254ec52d0dd9f2fbcc093fc5eb878b20c52146f9dfd33e2686ed28982187be593b47c - languageName: node - linkType: hard - "has-value@npm:^0.3.1": version: 0.3.1 resolution: "has-value@npm:0.3.1" @@ -8863,6 +8827,13 @@ __metadata: languageName: node linkType: hard +"help-me@npm:^5.0.0": + version: 5.0.0 + resolution: "help-me@npm:5.0.0" + checksum: 054c0e2e9ae2231c85ab5e04f75109b9d068ffcc54e58fb22079822a5ace8ff3d02c66fd45379c902ad5ab825e5d2e1451fcc2f7eab1eb49e7d488133ba4cacb + languageName: node + linkType: hard + "history@npm:^4.10.1, history@npm:^4.9.0": version: 4.10.1 resolution: "history@npm:4.10.1" @@ -10579,6 +10550,13 @@ __metadata: languageName: node linkType: hard +"joycon@npm:^3.1.1": + version: 3.1.1 + resolution: "joycon@npm:3.1.1" + checksum: 131fb1e98c9065d067fd49b6e685487ac4ad4d254191d7aa2c9e3b90f4e9ca70430c43cad001602bdbdabcf58717d3b5c5b7461c1bd8e39478c8de706b3fe6ae + languageName: node + linkType: hard + "jpeg-autorotate@npm:^7.1.1": version: 7.1.1 resolution: "jpeg-autorotate@npm:7.1.1" @@ -11975,18 +11953,6 @@ __metadata: languageName: node linkType: hard -"npmlog@npm:^7.0.1": - version: 7.0.1 - resolution: "npmlog@npm:7.0.1" - dependencies: - are-we-there-yet: "npm:^4.0.0" - console-control-strings: "npm:^1.1.0" - gauge: "npm:^5.0.0" - set-blocking: "npm:^2.0.0" - checksum: d4e6a2aaa7b5b5d2e2ed8f8ac3770789ca0691a49f3576b6a8c97d560a4c3305d2c233a9173d62be737e6e4506bf9e89debd6120a3843c1d37315c34f90fef71 - languageName: node - linkType: hard - "nth-check@npm:^1.0.2": version: 1.0.2 resolution: "nth-check@npm:1.0.2" @@ -12159,6 +12125,13 @@ __metadata: languageName: node linkType: hard +"on-exit-leak-free@npm:^2.1.0": + version: 2.1.2 + resolution: "on-exit-leak-free@npm:2.1.2" + checksum: faea2e1c9d696ecee919026c32be8d6a633a7ac1240b3b87e944a380e8a11dc9c95c4a1f8fb0568de7ab8db3823e790f12bda45296b1d111e341aad3922a0570 + languageName: node + linkType: hard + "on-finished@npm:2.4.1": version: 2.4.1 resolution: "on-finished@npm:2.4.1" @@ -12726,6 +12699,80 @@ __metadata: languageName: node linkType: hard +"pino-abstract-transport@npm:^1.0.0, pino-abstract-transport@npm:v1.1.0": + version: 1.1.0 + resolution: "pino-abstract-transport@npm:1.1.0" + dependencies: + readable-stream: "npm:^4.0.0" + split2: "npm:^4.0.0" + checksum: 6e9b9d5a2c0a37f91ecaf224d335daae1ae682b1c79a05b06ef9e0f0a5d289f8e597992217efc857796dae6f1067e9b4882f95c6228ff433ddc153532cae8aca + languageName: node + linkType: hard + +"pino-http@npm:^9.0.0": + version: 9.0.0 + resolution: 
"pino-http@npm:9.0.0" + dependencies: + get-caller-file: "npm:^2.0.5" + pino: "npm:^8.17.1" + pino-std-serializers: "npm:^6.2.2" + process-warning: "npm:^3.0.0" + checksum: 05496cb76cc9908658e50c4620fbdf7b0b5d99fb529493d601c3e4635b0bf7ce12b8a8eed7b5b520089f643b099233d61dd71f7cdfad8b66e59b9b81d79b6512 + languageName: node + linkType: hard + +"pino-pretty@npm:^10.3.1": + version: 10.3.1 + resolution: "pino-pretty@npm:10.3.1" + dependencies: + colorette: "npm:^2.0.7" + dateformat: "npm:^4.6.3" + fast-copy: "npm:^3.0.0" + fast-safe-stringify: "npm:^2.1.1" + help-me: "npm:^5.0.0" + joycon: "npm:^3.1.1" + minimist: "npm:^1.2.6" + on-exit-leak-free: "npm:^2.1.0" + pino-abstract-transport: "npm:^1.0.0" + pump: "npm:^3.0.0" + readable-stream: "npm:^4.0.0" + secure-json-parse: "npm:^2.4.0" + sonic-boom: "npm:^3.0.0" + strip-json-comments: "npm:^3.1.1" + bin: + pino-pretty: bin.js + checksum: 6964fba5acc7a9f112e4c6738d602e123daf16cb5f6ddc56ab4b6bb05059f28876d51da8f72358cf1172e95fa12496b70465431a0836df693c462986d050686b + languageName: node + linkType: hard + +"pino-std-serializers@npm:^6.0.0, pino-std-serializers@npm:^6.2.2": + version: 6.2.2 + resolution: "pino-std-serializers@npm:6.2.2" + checksum: 8f1c7f0f0d8f91e6c6b5b2a6bfb48f06441abeb85f1c2288319f736f9c6d814fbeebe928d2314efc2ba6018fa7db9357a105eca9fc99fc1f28945a8a8b28d3d5 + languageName: node + linkType: hard + +"pino@npm:^8.17.1, pino@npm:^8.17.2": + version: 8.17.2 + resolution: "pino@npm:8.17.2" + dependencies: + atomic-sleep: "npm:^1.0.0" + fast-redact: "npm:^3.1.1" + on-exit-leak-free: "npm:^2.1.0" + pino-abstract-transport: "npm:v1.1.0" + pino-std-serializers: "npm:^6.0.0" + process-warning: "npm:^3.0.0" + quick-format-unescaped: "npm:^4.0.3" + real-require: "npm:^0.2.0" + safe-stable-stringify: "npm:^2.3.1" + sonic-boom: "npm:^3.7.0" + thread-stream: "npm:^2.0.0" + bin: + pino: bin.js + checksum: 9e55af6cd9d1833a4dbe64924fc73163295acd3c988a9c7db88926669f2574ab7ec607e8487b6dd71dbdad2d7c1c1aac439f37e59233f37220b1a9d88fa2ce01 + languageName: node + linkType: hard + "pirates@npm:^4.0.4": version: 4.0.6 resolution: "pirates@npm:4.0.6" @@ -13328,6 +13375,13 @@ __metadata: languageName: node linkType: hard +"process-warning@npm:^3.0.0": + version: 3.0.0 + resolution: "process-warning@npm:3.0.0" + checksum: 60f3c8ddee586f0706c1e6cb5aa9c86df05774b9330d792d7c8851cf0031afd759d665404d07037e0b4901b55c44a423f07bdc465c63de07d8d23196bb403622 + languageName: node + linkType: hard + "process@npm:^0.11.10": version: 0.11.10 resolution: "process@npm:0.11.10" @@ -13505,6 +13559,13 @@ __metadata: languageName: node linkType: hard +"quick-format-unescaped@npm:^4.0.3": + version: 4.0.4 + resolution: "quick-format-unescaped@npm:4.0.4" + checksum: fe5acc6f775b172ca5b4373df26f7e4fd347975578199e7d74b2ae4077f0af05baa27d231de1e80e8f72d88275ccc6028568a7a8c9ee5e7368ace0e18eff93a4 + languageName: node + linkType: hard + "raf-schd@npm:^4.0.3": version: 4.0.3 resolution: "raf-schd@npm:4.0.3" @@ -13786,8 +13847,8 @@ __metadata: linkType: hard "react-redux@npm:^9.0.4": - version: 9.0.4 - resolution: "react-redux@npm:9.0.4" + version: 9.1.0 + resolution: "react-redux@npm:9.1.0" dependencies: "@types/use-sync-external-store": "npm:^0.0.3" use-sync-external-store: "npm:^1.0.0" @@ -13803,7 +13864,7 @@ __metadata: optional: true redux: optional: true - checksum: 23af10014b129aeb051de729bde01de21175170b860deefb7ad83483feab5816253f770a4cea93333fc22a53ac9ac699b27f5c3705c388dab53dbcb2906a571a + checksum: 
53161b5dc4d109020fbc42d26906ace92fed9ba1d7ab6274af60e9c0684583d20d1c8ec6d58601ac7b833c6468a652bbf3d4a102149d1793cb8a28b05b042f73 languageName: node linkType: hard @@ -14039,15 +14100,16 @@ __metadata: languageName: node linkType: hard -"readable-stream@npm:^4.1.0": - version: 4.4.0 - resolution: "readable-stream@npm:4.4.0" +"readable-stream@npm:^4.0.0": + version: 4.4.2 + resolution: "readable-stream@npm:4.4.2" dependencies: abort-controller: "npm:^3.0.0" buffer: "npm:^6.0.3" events: "npm:^3.3.0" process: "npm:^0.11.10" - checksum: 83f5a11285e5ebefb7b22a43ea77a2275075639325b4932a328a1fb0ee2475b83b9cc94326724d71c6aa3b60fa87e2b16623530b1cac34f3825dcea0996fdbe4 + string_decoder: "npm:^1.3.0" + checksum: cf7cc8daa2b57872d120945a20a1458c13dcb6c6f352505421115827b18ac4df0e483ac1fe195cb1f5cd226e1073fc55b92b569269d8299e8530840bcdbba40c languageName: node linkType: hard @@ -14071,6 +14133,13 @@ __metadata: languageName: node linkType: hard +"real-require@npm:^0.2.0": + version: 0.2.0 + resolution: "real-require@npm:0.2.0" + checksum: 23eea5623642f0477412ef8b91acd3969015a1501ed34992ada0e3af521d3c865bb2fe4cdbfec5fe4b505f6d1ef6a03e5c3652520837a8c3b53decff7e74b6a0 + languageName: node + linkType: hard + "redent@npm:^3.0.0": version: 3.0.0 resolution: "redent@npm:3.0.0" @@ -14616,6 +14685,13 @@ __metadata: languageName: node linkType: hard +"safe-stable-stringify@npm:^2.3.1": + version: 2.4.3 + resolution: "safe-stable-stringify@npm:2.4.3" + checksum: 81dede06b8f2ae794efd868b1e281e3c9000e57b39801c6c162267eb9efda17bd7a9eafa7379e1f1cacd528d4ced7c80d7460ad26f62ada7c9e01dec61b2e768 + languageName: node + linkType: hard + "safer-buffer@npm:>= 2.1.2 < 3, safer-buffer@npm:>= 2.1.2 < 3.0.0, safer-buffer@npm:^2.1.0": version: 2.1.2 resolution: "safer-buffer@npm:2.1.2" @@ -14649,15 +14725,15 @@ __metadata: linkType: hard "sass@npm:^1.62.1": - version: 1.69.7 - resolution: "sass@npm:1.69.7" + version: 1.70.0 + resolution: "sass@npm:1.70.0" dependencies: chokidar: "npm:>=3.0.0 <4.0.0" immutable: "npm:^4.0.0" source-map-js: "npm:>=0.6.2 <2.0.0" bin: sass: sass.js - checksum: 773d0938e7d4ff3972d3fda3132f34fe98a2f712e028a58e28fecd615434795eff3266eddc38d5e13f03b90c0d6360d0e737b30bff2949a47280c64a18e0fb18 + checksum: 7c309ee1c096d591746d122da9f1ebd65b4c4b3a60c2cc0ec720fd98fe1205fa8b44c9f563d113b9fdfeb25af1e32ec9b3e048bd4b8e05d267f020953bd7baf0 languageName: node linkType: hard @@ -14729,6 +14805,13 @@ __metadata: languageName: node linkType: hard +"secure-json-parse@npm:^2.4.0": + version: 2.7.0 + resolution: "secure-json-parse@npm:2.7.0" + checksum: f57eb6a44a38a3eeaf3548228585d769d788f59007454214fab9ed7f01fbf2e0f1929111da6db28cf0bcc1a2e89db5219a59e83eeaec3a54e413a0197ce879e4 + languageName: node + linkType: hard + "select-hose@npm:^2.0.0": version: 2.0.0 resolution: "select-hose@npm:2.0.0" @@ -15132,6 +15215,15 @@ __metadata: languageName: node linkType: hard +"sonic-boom@npm:^3.0.0, sonic-boom@npm:^3.7.0": + version: 3.7.0 + resolution: "sonic-boom@npm:3.7.0" + dependencies: + atomic-sleep: "npm:^1.0.0" + checksum: 57a3d560efb77f4576db111168ee2649c99e7869fda6ce0ec2a4e5458832d290ba58d74b073ddb5827d9a30f96d23cff79157993d919e1a6d5f28d8b6391c7f0 + languageName: node + linkType: hard + "source-list-map@npm:^2.0.0": version: 2.0.1 resolution: "source-list-map@npm:2.0.1" @@ -15290,7 +15382,7 @@ __metadata: languageName: node linkType: hard -"split2@npm:^4.1.0": +"split2@npm:^4.0.0, split2@npm:^4.1.0": version: 4.2.0 resolution: "split2@npm:4.2.0" checksum: 
b292beb8ce9215f8c642bb68be6249c5a4c7f332fc8ecadae7be5cbdf1ea95addc95f0459ef2e7ad9d45fd1064698a097e4eb211c83e772b49bc0ee423e91534 @@ -15455,7 +15547,7 @@ __metadata: languageName: node linkType: hard -"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^1.0.2 || 2 || 3 || 4, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.3": +"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.3": version: 4.2.3 resolution: "string-width@npm:4.2.3" dependencies: @@ -15548,7 +15640,7 @@ __metadata: languageName: node linkType: hard -"string_decoder@npm:^1.0.0, string_decoder@npm:^1.1.1": +"string_decoder@npm:^1.0.0, string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0": version: 1.3.0 resolution: "string_decoder@npm:1.3.0" dependencies: @@ -16094,6 +16186,15 @@ __metadata: languageName: node linkType: hard +"thread-stream@npm:^2.0.0": + version: 2.4.1 + resolution: "thread-stream@npm:2.4.1" + dependencies: + real-require: "npm:^0.2.0" + checksum: ce29265810b9550ce896726301ff006ebfe96b90292728f07cfa4c379740585583046e2a8018afc53aca66b18fed12b33a84f3883e7ebc317185f6682898b8f8 + languageName: node + linkType: hard + "thunky@npm:^1.0.2": version: 1.1.0 resolution: "thunky@npm:1.1.0" @@ -17340,15 +17441,6 @@ __metadata: languageName: node linkType: hard -"wide-align@npm:^1.1.5": - version: 1.1.5 - resolution: "wide-align@npm:1.1.5" - dependencies: - string-width: "npm:^1.0.2 || 2 || 3 || 4" - checksum: 1d9c2a3e36dfb09832f38e2e699c367ef190f96b82c71f809bc0822c306f5379df87bab47bed27ea99106d86447e50eb972d3c516c2f95782807a9d082fbea95 - languageName: node - linkType: hard - "wildcard@npm:^2.0.0": version: 2.0.1 resolution: "wildcard@npm:2.0.1"
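Finally, a standalone snippet (not part of the patch) reproducing the access-token redaction that sanitizeRequestLog applies to request URLs before they are logged; redactAccessToken is a local helper name used only for this illustration.

// Same pattern as streaming/logging.js uses for req.url:
const redactAccessToken = (url) =>
  url.replace(/(access_token)=([a-zA-Z0-9\-_]+)/gi, '$1=[Redacted]');

console.log(redactAccessToken('/api/v1/streaming/user?access_token=abc123'));
// => '/api/v1/streaming/user?access_token=[Redacted]'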