Merge commit 'f877aa9d70' into kb_migration
Commit 32f0e619f0
440 changed files with 6249 additions and 3435 deletions
@@ -82,6 +82,7 @@ class Account < ApplicationRecord
  include DomainNormalizable
  include DomainMaterializable
  include AccountMerging
  include AccountSearch

  enum protocol: { ostatus: 0, activitypub: 1 }
  enum suspension_origin: { local: 0, remote: 1 }, _prefix: true

@@ -435,14 +436,6 @@ class Account < ApplicationRecord
  end

  class << self
    DISALLOWED_TSQUERY_CHARACTERS = /['?\\:‘’]/
    TEXTSEARCH = "(setweight(to_tsvector('simple', accounts.display_name), 'A') || setweight(to_tsvector('simple', accounts.username), 'B') || setweight(to_tsvector('simple', coalesce(accounts.domain, '')), 'C'))"

    REPUTATION_SCORE_FUNCTION = '(greatest(0, coalesce(s.followers_count, 0)) / (greatest(0, coalesce(s.following_count, 0)) + 1.0))'
    FOLLOWERS_SCORE_FUNCTION = 'log(greatest(0, coalesce(s.followers_count, 0)) + 2)'
    TIME_DISTANCE_FUNCTION = '(case when s.last_status_at is null then 0 else exp(-1.0 * ((greatest(0, abs(extract(DAY FROM age(s.last_status_at))) - 30.0)^2) / (2.0 * ((-1.0 * 30^2) / (2.0 * ln(0.3)))))) end)'
    BOOST = "((#{REPUTATION_SCORE_FUNCTION} + #{FOLLOWERS_SCORE_FUNCTION} + #{TIME_DISTANCE_FUNCTION}) / 3.0)"

    def readonly_attributes
      super - %w(statuses_count following_count followers_count)
    end

@@ -452,37 +445,6 @@ class Account < ApplicationRecord
      DeliveryFailureTracker.without_unavailable(urls)
    end

    def search_for(terms, limit: 10, offset: 0)
      tsquery = generate_query_for_search(terms)

      sql = <<-SQL.squish
        SELECT
          accounts.*,
          #{BOOST} * ts_rank_cd(#{TEXTSEARCH}, to_tsquery('simple', :tsquery), 32) AS rank
        FROM accounts
        LEFT JOIN users ON accounts.id = users.account_id
        LEFT JOIN account_stats AS s ON accounts.id = s.account_id
        WHERE to_tsquery('simple', :tsquery) @@ #{TEXTSEARCH}
          AND accounts.suspended_at IS NULL
          AND accounts.moved_to_account_id IS NULL
          AND (accounts.domain IS NOT NULL OR (users.approved = TRUE AND users.confirmed_at IS NOT NULL))
        ORDER BY rank DESC
        LIMIT :limit OFFSET :offset
      SQL

      records = find_by_sql([sql, limit: limit, offset: offset, tsquery: tsquery])
      ActiveRecord::Associations::Preloader.new.preload(records, :account_stat)
      records
    end

    def advanced_search_for(terms, account, limit: 10, following: false, offset: 0)
      tsquery = generate_query_for_search(terms)
      sql = advanced_search_for_sql_template(following)
      records = find_by_sql([sql, id: account.id, limit: limit, offset: offset, tsquery: tsquery])
      ActiveRecord::Associations::Preloader.new.preload(records, :account_stat)
      records
    end

    def from_text(text)
      return [] if text.blank?

@@ -496,73 +458,15 @@ class Account < ApplicationRecord
        EntityCache.instance.mention(username, domain)
      end
    end

    private

    def generate_query_for_search(unsanitized_terms)
      terms = unsanitized_terms.gsub(DISALLOWED_TSQUERY_CHARACTERS, ' ')

      # The final ":*" is for prefix search.
      # The trailing space does not seem to fit any purpose, but `to_tsquery`
      # behaves differently with and without a leading space if the terms start
      # with `./`, `../`, or `.. `. I don't understand why, so, in doubt, keep
      # the same query.
      "' #{terms} ':*"
    end

    def advanced_search_for_sql_template(following)
      if following
        <<-SQL.squish
          WITH first_degree AS (
            SELECT target_account_id
            FROM follows
            WHERE account_id = :id
            UNION ALL
            SELECT :id
          )
          SELECT
            accounts.*,
            (count(f.id) + 1) * #{BOOST} * ts_rank_cd(#{TEXTSEARCH}, to_tsquery('simple', :tsquery), 32) AS rank
          FROM accounts
          LEFT OUTER JOIN follows AS f ON (accounts.id = f.account_id AND f.target_account_id = :id)
          LEFT JOIN account_stats AS s ON accounts.id = s.account_id
          WHERE accounts.id IN (SELECT * FROM first_degree)
            AND to_tsquery('simple', :tsquery) @@ #{TEXTSEARCH}
            AND accounts.suspended_at IS NULL
            AND accounts.moved_to_account_id IS NULL
          GROUP BY accounts.id, s.id
          ORDER BY rank DESC
          LIMIT :limit OFFSET :offset
        SQL
      else
        <<-SQL.squish
          SELECT
            accounts.*,
            #{BOOST} * ts_rank_cd(#{TEXTSEARCH}, to_tsquery('simple', :tsquery), 32) AS rank,
            count(f.id) AS followed
          FROM accounts
          LEFT OUTER JOIN follows AS f ON (accounts.id = f.account_id AND f.target_account_id = :id) OR (accounts.id = f.target_account_id AND f.account_id = :id)
          LEFT JOIN users ON accounts.id = users.account_id
          LEFT JOIN account_stats AS s ON accounts.id = s.account_id
          WHERE to_tsquery('simple', :tsquery) @@ #{TEXTSEARCH}
            AND accounts.suspended_at IS NULL
            AND accounts.moved_to_account_id IS NULL
            AND (accounts.domain IS NOT NULL OR (users.approved = TRUE AND users.confirmed_at IS NOT NULL))
          GROUP BY accounts.id, s.id
          ORDER BY followed DESC, rank DESC
          LIMIT :limit OFFSET :offset
        SQL
      end
    end
  end

  def emojis
    @emojis ||= CustomEmoji.from_text(emojifiable_text, domain)
  end

  before_create :generate_keys
  before_validation :prepare_contents, if: :local?
  before_validation :prepare_username, on: :create
  before_create :generate_keys
  before_destroy :clean_feed_manager

  def ensure_keys!

@@ -17,14 +17,13 @@
class AccountConversation < ApplicationRecord
  include Redisable

  before_validation :set_last_status
  after_commit :push_to_streaming_api

  belongs_to :account
  belongs_to :conversation
  belongs_to :last_status, class_name: 'Status'

  before_validation :set_last_status

  def participant_account_ids=(arr)
    self[:participant_account_ids] = arr.sort
  end

@@ -42,7 +42,7 @@ class AccountMigration < ApplicationRecord
    return false unless errors.empty?

    with_lock("account_migration:#{account.id}") do
    with_redis_lock("account_migration:#{account.id}") do
      save
    end
  end

@@ -37,16 +37,12 @@ class AccountStatusesFilter
  private

  def initial_scope
    if suspended? || (domain_block&.reject_send_dissubscribable && @account.dissubscribable)
      Status.none
    elsif domain_block&.reject_send_media
    if (suspended? || (domain_block&.reject_send_dissubscribable && @account.dissubscribable)) || domain_block&.reject_send_media || blocked?
      Status.none
    elsif anonymous?
      account.statuses.where(visibility: %i(public unlisted public_unlisted))
    elsif author?
      account.statuses.all # NOTE: #merge! does not work without the #all
    elsif blocked?
      Status.none
    else
      filtered_scope
    end

@@ -18,7 +18,7 @@ class AccountSuggestions::Source
  def as_ordered_suggestions(scope, ordered_list)
    return [] if ordered_list.empty?

    map = scope.index_by(&method(:to_ordered_list_key))
    map = scope.index_by { |account| to_ordered_list_key(account) }

    ordered_list.map { |ordered_list_key| map[ordered_list_key] }.compact.map do |account|
      AccountSuggestions::Suggestion.new(

@@ -5,6 +5,8 @@ class Admin::AppealFilter
    status
  ).freeze

  IGNORED_PARAMS = %w(page).freeze

  attr_reader :params

  def initialize(params)

@@ -15,7 +17,7 @@ class Admin::AppealFilter
    scope = Appeal.order(id: :desc)

    params.each do |key, value|
      next if %w(page).include?(key.to_s)
      next if IGNORED_PARAMS.include?(key.to_s)

      scope.merge!(scope_for(key, value.to_s.strip)) if value.present?
    end

@@ -6,6 +6,8 @@ class Admin::StatusFilter
    report_id
  ).freeze

  IGNORED_PARAMS = %w(page report_id).freeze

  attr_reader :params

  def initialize(account, params)

@@ -17,7 +19,7 @@ class Admin::StatusFilter
    scope = @account.statuses.where(visibility: [:public, :unlisted, :public_unlisted])

    params.each do |key, value|
      next if %w(page report_id).include?(key.to_s)
      next if IGNORED_PARAMS.include?(key.to_s)

      scope.merge!(scope_for(key, value.to_s.strip)) if value.present?
    end

@@ -14,6 +14,7 @@
#

class AnnouncementReaction < ApplicationRecord
  before_validation :set_custom_emoji
  after_commit :queue_publish

  belongs_to :account

@@ -23,8 +24,6 @@ class AnnouncementReaction < ApplicationRecord
  validates :name, presence: true
  validates_with ReactionValidator

  before_validation :set_custom_emoji

  private

  def set_custom_emoji

@@ -25,8 +25,8 @@ class Block < ApplicationRecord
    false # Force uri_for to use uri attribute
  end

  after_commit :remove_blocking_cache
  before_validation :set_uri, only: :create
  after_commit :remove_blocking_cache

  private

app/models/bulk_import.rb (new file, 53 lines)
@@ -0,0 +1,53 @@
# frozen_string_literal: true

# == Schema Information
#
# Table name: bulk_imports
#
#  id                :bigint(8)        not null, primary key
#  type              :integer          not null
#  state             :integer          not null
#  total_items       :integer          default(0), not null
#  imported_items    :integer          default(0), not null
#  processed_items   :integer          default(0), not null
#  finished_at       :datetime
#  overwrite         :boolean          default(FALSE), not null
#  likely_mismatched :boolean          default(FALSE), not null
#  original_filename :string           default(""), not null
#  account_id        :bigint(8)        not null
#  created_at        :datetime         not null
#  updated_at        :datetime         not null
#
class BulkImport < ApplicationRecord
  self.inheritance_column = false

  belongs_to :account
  has_many :rows, class_name: 'BulkImportRow', inverse_of: :bulk_import, dependent: :delete_all

  enum type: {
    following: 0,
    blocking: 1,
    muting: 2,
    domain_blocking: 3,
    bookmarks: 4,
  }

  enum state: {
    unconfirmed: 0,
    scheduled: 1,
    in_progress: 2,
    finished: 3,
  }

  validates :type, presence: true

  def self.progress!(bulk_import_id, imported: false)
    # Use `increment_counter` so that the incrementation is done atomically in the database
    BulkImport.increment_counter(:processed_items, bulk_import_id) # rubocop:disable Rails/SkipsModelValidations
    BulkImport.increment_counter(:imported_items, bulk_import_id) if imported # rubocop:disable Rails/SkipsModelValidations

    # Since the incrementation has been done atomically, concurrent access to `bulk_import` is now benign
    bulk_import = BulkImport.find(bulk_import_id)
    bulk_import.update!(state: :finished, finished_at: Time.now.utc) if bulk_import.processed_items == bulk_import.total_items
  end
end

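Reviewer note: a minimal sketch of how `BulkImport.progress!` is intended to be driven by row-level workers. Only the `progress!` call comes from the model above; the worker name and row handling are hypothetical.

# Hypothetical worker sketch; illustrative only.
class ExampleBulkImportRowWorker
  def perform(row_id)
    row = BulkImportRow.find(row_id)
    imported = process(row) # assumed application-specific handling of row.data

    # Atomic counters let concurrent workers report progress safely;
    # the last processed row flips the import to the :finished state.
    BulkImport.progress!(row.bulk_import_id, imported: imported)
  end

  def process(row)
    true # placeholder: return true when the row was applied successfully
  end
end
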
app/models/bulk_import_row.rb (new file, 15 lines)
@@ -0,0 +1,15 @@
# frozen_string_literal: true

# == Schema Information
#
# Table name: bulk_import_rows
#
#  id             :bigint(8)        not null, primary key
#  bulk_import_id :bigint(8)        not null
#  data           :jsonb
#  created_at     :datetime         not null
#  updated_at     :datetime         not null
#
class BulkImportRow < ApplicationRecord
  belongs_to :bulk_import
end

@@ -76,5 +76,8 @@ module AccountAssociations
    # Account statuses cleanup policy
    has_one :statuses_cleanup_policy, class_name: 'AccountStatusesCleanupPolicy', inverse_of: :account, dependent: :destroy

    # Imports
    has_many :bulk_imports, inverse_of: :account, dependent: :delete_all
  end
end

@@ -275,7 +275,8 @@ module AccountInteractions
  end

  def lists_for_local_distribution
    lists.joins(account: :user)
    scope = lists.joins(account: :user)
    scope.where.not(list_accounts: { follow_id: nil }).or(scope.where(account_id: id))
         .where('users.current_sign_in_at > ?', User::ACTIVE_DURATION.ago)
  end

app/models/concerns/account_search.rb (new file, 140 lines)
@@ -0,0 +1,140 @@
# frozen_string_literal: true

module AccountSearch
  extend ActiveSupport::Concern

  DISALLOWED_TSQUERY_CHARACTERS = /['?\\:‘’]/

  TEXT_SEARCH_RANKS = <<~SQL.squish
    (
      setweight(to_tsvector('simple', accounts.display_name), 'A') ||
      setweight(to_tsvector('simple', accounts.username), 'B') ||
      setweight(to_tsvector('simple', coalesce(accounts.domain, '')), 'C')
    )
  SQL

  REPUTATION_SCORE_FUNCTION = <<~SQL.squish
    (
      greatest(0, coalesce(s.followers_count, 0)) / (
        greatest(0, coalesce(s.following_count, 0)) + 1.0
      )
    )
  SQL

  FOLLOWERS_SCORE_FUNCTION = <<~SQL.squish
    log(
      greatest(0, coalesce(s.followers_count, 0)) + 2
    )
  SQL

  TIME_DISTANCE_FUNCTION = <<~SQL.squish
    (
      case
        when s.last_status_at is null then 0
        else exp(
          -1.0 * (
            (
              greatest(0, abs(extract(DAY FROM age(s.last_status_at))) - 30.0)^2) /#{' '}
              (2.0 * ((-1.0 * 30^2) / (2.0 * ln(0.3)))
            )
          )
        )
      end
    )
  SQL

  BOOST = <<~SQL.squish
    (
      (#{REPUTATION_SCORE_FUNCTION} + #{FOLLOWERS_SCORE_FUNCTION} + #{TIME_DISTANCE_FUNCTION}) / 3.0
    )
  SQL

  BASIC_SEARCH_SQL = <<~SQL.squish
    SELECT
      accounts.*,
      #{BOOST} * ts_rank_cd(#{TEXT_SEARCH_RANKS}, to_tsquery('simple', :tsquery), 32) AS rank
    FROM accounts
    LEFT JOIN users ON accounts.id = users.account_id
    LEFT JOIN account_stats AS s ON accounts.id = s.account_id
    WHERE to_tsquery('simple', :tsquery) @@ #{TEXT_SEARCH_RANKS}
      AND accounts.suspended_at IS NULL
      AND accounts.moved_to_account_id IS NULL
      AND (accounts.domain IS NOT NULL OR (users.approved = TRUE AND users.confirmed_at IS NOT NULL))
    ORDER BY rank DESC
    LIMIT :limit OFFSET :offset
  SQL

  ADVANCED_SEARCH_WITH_FOLLOWING = <<~SQL.squish
    WITH first_degree AS (
      SELECT target_account_id
      FROM follows
      WHERE account_id = :id
      UNION ALL
      SELECT :id
    )
    SELECT
      accounts.*,
      (count(f.id) + 1) * #{BOOST} * ts_rank_cd(#{TEXT_SEARCH_RANKS}, to_tsquery('simple', :tsquery), 32) AS rank
    FROM accounts
    LEFT OUTER JOIN follows AS f ON (accounts.id = f.account_id AND f.target_account_id = :id)
    LEFT JOIN account_stats AS s ON accounts.id = s.account_id
    WHERE accounts.id IN (SELECT * FROM first_degree)
      AND to_tsquery('simple', :tsquery) @@ #{TEXT_SEARCH_RANKS}
      AND accounts.suspended_at IS NULL
      AND accounts.moved_to_account_id IS NULL
    GROUP BY accounts.id, s.id
    ORDER BY rank DESC
    LIMIT :limit OFFSET :offset
  SQL

  ADVANCED_SEARCH_WITHOUT_FOLLOWING = <<~SQL.squish
    SELECT
      accounts.*,
      #{BOOST} * ts_rank_cd(#{TEXT_SEARCH_RANKS}, to_tsquery('simple', :tsquery), 32) AS rank,
      count(f.id) AS followed
    FROM accounts
    LEFT OUTER JOIN follows AS f ON
      (accounts.id = f.account_id AND f.target_account_id = :id) OR (accounts.id = f.target_account_id AND f.account_id = :id)
    LEFT JOIN users ON accounts.id = users.account_id
    LEFT JOIN account_stats AS s ON accounts.id = s.account_id
    WHERE to_tsquery('simple', :tsquery) @@ #{TEXT_SEARCH_RANKS}
      AND accounts.suspended_at IS NULL
      AND accounts.moved_to_account_id IS NULL
      AND (accounts.domain IS NOT NULL OR (users.approved = TRUE AND users.confirmed_at IS NOT NULL))
    GROUP BY accounts.id, s.id
    ORDER BY followed DESC, rank DESC
    LIMIT :limit OFFSET :offset
  SQL

  class_methods do
    def search_for(terms, limit: 10, offset: 0)
      tsquery = generate_query_for_search(terms)

      find_by_sql([BASIC_SEARCH_SQL, { limit: limit, offset: offset, tsquery: tsquery }]).tap do |records|
        ActiveRecord::Associations::Preloader.new.preload(records, :account_stat)
      end
    end

    def advanced_search_for(terms, account, limit: 10, following: false, offset: 0)
      tsquery = generate_query_for_search(terms)
      sql_template = following ? ADVANCED_SEARCH_WITH_FOLLOWING : ADVANCED_SEARCH_WITHOUT_FOLLOWING

      find_by_sql([sql_template, { id: account.id, limit: limit, offset: offset, tsquery: tsquery }]).tap do |records|
        ActiveRecord::Associations::Preloader.new.preload(records, :account_stat)
      end
    end

    private

    def generate_query_for_search(unsanitized_terms)
      terms = unsanitized_terms.gsub(DISALLOWED_TSQUERY_CHARACTERS, ' ')

      # The final ":*" is for prefix search.
      # The trailing space does not seem to fit any purpose, but `to_tsquery`
      # behaves differently with and without a leading space if the terms start
      # with `./`, `../`, or `.. `. I don't understand why, so, in doubt, keep
      # the same query.
      "' #{terms} ':*"
    end
  end
end

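Reviewer note: since `AccountSearch` is included into `Account` (first hunk of this diff), the extracted class methods are called exactly as before. A minimal usage sketch, assuming an existing `Account` record named `viewer`:

# Plain search: sanitized prefix tsquery, ranked by BOOST * ts_rank_cd.
results = Account.search_for('alice example.com', limit: 5)

# Personalized search: boosts mutuals/follows of `viewer`, or restricts to
# accounts `viewer` follows when `following: true`.
personalized = Account.advanced_search_for('alice', viewer, limit: 5, following: false)
personalized.each { |account| puts "#{account.acct} rank=#{account.rank}" }
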
@@ -5,7 +5,7 @@ module Lockable
  # @param [ActiveSupport::Duration] autorelease Automatically release the lock after this time
  # @param [Boolean] raise_on_failure Raise an error if a lock cannot be acquired, or fail silently
  # @raise [Mastodon::RaceConditionError]
  def with_lock(lock_name, autorelease: 15.minutes, raise_on_failure: true)
  def with_redis_lock(lock_name, autorelease: 15.minutes, raise_on_failure: true)
    with_redis do |redis|
      RedisLock.acquire(redis: redis, key: "lock:#{lock_name}", autorelease: autorelease.seconds) do |lock|
        if lock.acquired?

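Reviewer note: the rename is mechanical, but every caller has to follow (as in the `AccountMigration` hunk above). A minimal sketch of the new call shape, with a made-up lock name and `record` standing in for whatever the caller protects:

# Acquires "lock:example:<id>" in Redis, auto-released after 15 minutes by default;
# raises Mastodon::RaceConditionError if the lock cannot be acquired.
with_redis_lock("example:#{record.id}") do
  record.save
end
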
app/models/concerns/status_safe_reblog_insert.rb (new file, 72 lines)
@@ -0,0 +1,72 @@
# frozen_string_literal: true

module StatusSafeReblogInsert
  extend ActiveSupport::Concern

  class_methods do
    # This is a hack to ensure that no reblogs of discarded statuses are created,
    # as this cannot be enforced through database constraints the same way we do
    # for reblogs of deleted statuses.
    #
    # To achieve this, we redefine the internal method responsible for issuing
    # the "INSERT" statement and replace the "INSERT INTO ... VALUES ..." query
    # with an "INSERT INTO ... SELECT ..." query with a "WHERE deleted_at IS NULL"
    # clause on the reblogged status to ensure consistency at the database level.
    #
    # Otherwise, the code is kept as close as possible to ActiveRecord::Persistence
    # code, and actually calls it if we are not handling a reblog.
    def _insert_record(values)
      return super unless values.is_a?(Hash) && values['reblog_of_id'].present?

      primary_key = self.primary_key
      primary_key_value = nil

      if primary_key
        primary_key_value = values[primary_key]

        if !primary_key_value && prefetch_primary_key?
          primary_key_value = next_sequence_value
          values[primary_key] = primary_key_value
        end
      end

      # The following line is where we differ from stock ActiveRecord implementation
      im = _compile_reblog_insert(values)

      # Since we are using SELECT instead of VALUES, a non-error `nil` return is possible.
      # For our purposes, it's equivalent to a foreign key constraint violation
      result = connection.insert(im, "#{self} Create", primary_key || false, primary_key_value)
      raise ActiveRecord::InvalidForeignKey, "(reblog_of_id)=(#{values['reblog_of_id']}) is not present in table \"statuses\"" if result.nil?

      result
    end

    def _compile_reblog_insert(values)
      # This is somewhat equivalent to the following code of ActiveRecord::Persistence:
      # `arel_table.compile_insert(_substitute_values(values))`
      # The main difference is that we use a `SELECT` instead of a `VALUES` clause,
      # which means we have to build the `SELECT` clause ourselves and do a bit more
      # manual work.

      # Instead of using Arel::InsertManager#values, we are going to use Arel::InsertManager#select
      im = Arel::InsertManager.new
      im.into(arel_table)

      binds = []
      reblog_bind = nil
      values.each do |name, value|
        attr = arel_table[name]
        bind = predicate_builder.build_bind_attribute(attr.name, value)

        im.columns << attr
        binds << bind

        reblog_bind = bind if name == 'reblog_of_id'
      end

      im.select(arel_table.where(arel_table[:id].eq(reblog_bind)).where(arel_table[:deleted_at].eq(nil)).project(*binds))

      im
    end
  end
end

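Reviewer note: this is the same code that the `Status` hunk further down removes, so behaviour is unchanged; only the home of the override moves. As a rough illustration of what the guard buys us (simplified, since real reblogs go through services and model validations first), assuming `discarded_status` is a `Status` whose `deleted_at` is set:

# The INSERT ... SELECT only matches rows where deleted_at IS NULL, so inserting a
# reblog of a discarded status writes nothing and surfaces as a foreign-key-style error.
begin
  Status.create!(account: account, reblog_of_id: discarded_status.id, visibility: :public)
rescue ActiveRecord::InvalidForeignKey
  # nothing was inserted: the SELECT matched no non-deleted row
end
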
@@ -32,7 +32,8 @@ class FollowRequest < ApplicationRecord
  validates :languages, language: true

  def authorize!
    account.follow!(target_account, reblogs: show_reblogs, notify: notify, languages: languages, uri: uri, bypass_limit: true)
    follow = account.follow!(target_account, reblogs: show_reblogs, notify: notify, languages: languages, uri: uri, bypass_limit: true)
    ListAccount.where(follow_request: self).update_all(follow_request_id: nil, follow_id: follow.id) # rubocop:disable Rails/SkipsModelValidations
    MergeWorker.perform_async(target_account.id, account.id) if account.local?
    destroy!
  end

app/models/form/import.rb (new file, 151 lines)
@@ -0,0 +1,151 @@
# frozen_string_literal: true

require 'csv'

# A non-ActiveRecord helper class for CSV uploads.
# Handles saving contents to database.
class Form::Import
  include ActiveModel::Model

  MODES = %i(merge overwrite).freeze

  FILE_SIZE_LIMIT = 20.megabytes
  ROWS_PROCESSING_LIMIT = 20_000

  EXPECTED_HEADERS_BY_TYPE = {
    following: ['Account address', 'Show boosts', 'Notify on new posts', 'Languages'],
    blocking: ['Account address'],
    muting: ['Account address', 'Hide notifications'],
    domain_blocking: ['#domain'],
    bookmarks: ['#uri'],
  }.freeze

  KNOWN_FIRST_HEADERS = EXPECTED_HEADERS_BY_TYPE.values.map(&:first).uniq.freeze

  ATTRIBUTE_BY_HEADER = {
    'Account address' => 'acct',
    'Show boosts' => 'show_reblogs',
    'Notify on new posts' => 'notify',
    'Languages' => 'languages',
    'Hide notifications' => 'hide_notifications',
    '#domain' => 'domain',
    '#uri' => 'uri',
  }.freeze

  class EmptyFileError < StandardError; end

  attr_accessor :current_account, :data, :type, :overwrite, :bulk_import

  validates :type, presence: true
  validates :data, presence: true
  validate :validate_data

  def guessed_type
    return :muting if csv_data.headers.include?('Hide notifications')
    return :following if csv_data.headers.include?('Show boosts') || csv_data.headers.include?('Notify on new posts') || csv_data.headers.include?('Languages')
    return :following if data.original_filename&.start_with?('follows') || data.original_filename&.start_with?('following_accounts')
    return :blocking if data.original_filename&.start_with?('blocks') || data.original_filename&.start_with?('blocked_accounts')
    return :muting if data.original_filename&.start_with?('mutes') || data.original_filename&.start_with?('muted_accounts')
    return :domain_blocking if data.original_filename&.start_with?('domain_blocks') || data.original_filename&.start_with?('blocked_domains')
    return :bookmarks if data.original_filename&.start_with?('bookmarks')
  end

  # Whether the uploaded CSV file seems to correspond to a different import type than the one selected
  def likely_mismatched?
    guessed_type.present? && guessed_type != type.to_sym
  end

  def save
    return false unless valid?

    ApplicationRecord.transaction do
      now = Time.now.utc
      @bulk_import = current_account.bulk_imports.create(type: type, overwrite: overwrite || false, state: :unconfirmed, original_filename: data.original_filename, likely_mismatched: likely_mismatched?)
      nb_items = BulkImportRow.insert_all(parsed_rows.map { |row| { bulk_import_id: bulk_import.id, data: row, created_at: now, updated_at: now } }).length # rubocop:disable Rails/SkipsModelValidations
      @bulk_import.update(total_items: nb_items)
    end
  end

  def mode
    overwrite ? :overwrite : :merge
  end

  def mode=(str)
    self.overwrite = str.to_sym == :overwrite
  end

  private

  def default_csv_header
    case type.to_sym
    when :following, :blocking, :muting
      'Account address'
    when :domain_blocking
      '#domain'
    when :bookmarks
      '#uri'
    end
  end

  def csv_data
    return @csv_data if defined?(@csv_data)

    csv_converter = lambda do |field, field_info|
      case field_info.header
      when 'Show boosts', 'Notify on new posts', 'Hide notifications'
        ActiveModel::Type::Boolean.new.cast(field)
      when 'Languages'
        field&.split(',')&.map(&:strip)&.presence
      when 'Account address'
        field.strip.gsub(/\A@/, '')
      when '#domain', '#uri'
        field.strip
      else
        field
      end
    end

    @csv_data = CSV.open(data.path, encoding: 'UTF-8', skip_blanks: true, headers: true, converters: csv_converter)
    @csv_data.take(1) # Ensure the headers are read
    raise EmptyFileError if @csv_data.headers == true

    @csv_data = CSV.open(data.path, encoding: 'UTF-8', skip_blanks: true, headers: [default_csv_header], converters: csv_converter) unless KNOWN_FIRST_HEADERS.include?(@csv_data.headers&.first)
    @csv_data
  end

  def csv_row_count
    return @csv_row_count if defined?(@csv_row_count)

    csv_data.rewind
    @csv_row_count = csv_data.take(ROWS_PROCESSING_LIMIT + 2).count
  end

  def parsed_rows
    csv_data.rewind

    expected_headers = EXPECTED_HEADERS_BY_TYPE[type.to_sym]

    csv_data.take(ROWS_PROCESSING_LIMIT + 1).map do |row|
      row.to_h.slice(*expected_headers).transform_keys { |key| ATTRIBUTE_BY_HEADER[key] }
    end
  end

  def validate_data
    return if data.nil?
    return errors.add(:data, I18n.t('imports.errors.too_large')) if data.size > FILE_SIZE_LIMIT
    return errors.add(:data, I18n.t('imports.errors.incompatible_type')) unless csv_data.headers.include?(default_csv_header)

    errors.add(:data, I18n.t('imports.errors.over_rows_processing_limit', count: ROWS_PROCESSING_LIMIT)) if csv_row_count > ROWS_PROCESSING_LIMIT

    if type.to_sym == :following
      base_limit = FollowLimitValidator.limit_for_account(current_account)
      limit = base_limit
      limit -= current_account.following_count unless overwrite
      errors.add(:data, I18n.t('users.follow_limit_reached', limit: base_limit)) if csv_row_count > limit
    end
  rescue CSV::MalformedCSVError => e
    errors.add(:data, I18n.t('imports.errors.invalid_csv_file', error: e.message))
  rescue EmptyFileError
    errors.add(:data, I18n.t('imports.errors.empty'))
  end
end

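Reviewer note: a minimal sketch of the intended flow for the new form object, assuming `current_account` and an uploaded `file` (responding to `path`, `size` and `original_filename`) are available; controller and background-worker wiring are outside this hunk:

import = Form::Import.new(
  current_account: current_account,
  type: 'following',
  mode: 'merge', # leaves overwrite = false
  data: file     # e.g. an ActionDispatch::Http::UploadedFile
)

if import.save
  # rows are now persisted as BulkImportRow records, and the BulkImport sits in :unconfirmed
  bulk_import = import.bulk_import
  puts "queued #{bulk_import.total_items} rows (likely mismatched: #{bulk_import.likely_mismatched?})"
else
  puts import.errors.full_messages
end
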
@@ -17,6 +17,9 @@
# overwrite :boolean default(FALSE), not null
#

# NOTE: This is a deprecated model, only kept to not break ongoing imports
# on upgrade. See `BulkImport` and `Form::Import` for its replacements.

class Import < ApplicationRecord
  FILE_TYPES = %w(text/plain text/csv application/csv).freeze
  MODES = %i(merge overwrite).freeze

@@ -28,7 +31,6 @@ class Import < ApplicationRecord
  enum type: { following: 0, blocking: 1, muting: 2, domain_blocking: 3, bookmarks: 4 }

  validates :type, presence: true
  validates_with ImportValidator, on: :create

  has_attached_file :data
  validates_attachment_content_type :data, content_type: FILE_TYPES

@@ -4,24 +4,39 @@
#
# Table name: list_accounts
#
#  id         :bigint(8)        not null, primary key
#  list_id    :bigint(8)        not null
#  account_id :bigint(8)        not null
#  follow_id  :bigint(8)
#  id                :bigint(8)        not null, primary key
#  list_id           :bigint(8)        not null
#  account_id        :bigint(8)        not null
#  follow_id         :bigint(8)
#  follow_request_id :bigint(8)
#

class ListAccount < ApplicationRecord
  belongs_to :list
  belongs_to :account
  belongs_to :follow, optional: true
  belongs_to :follow_request, optional: true

  validates :account_id, uniqueness: { scope: :list_id }
  validate :validate_relationship

  before_validation :set_follow

  private

  def set_follow
    self.follow = Follow.find_by!(account_id: list.account_id, target_account_id: account.id) unless list.account_id == account.id
    return if list.account_id == account.id

    self.follow = Follow.find_by!(account_id: list.account_id, target_account_id: account.id)
  rescue ActiveRecord::RecordNotFound
    self.follow_request = FollowRequest.find_by!(account_id: list.account_id, target_account_id: account.id)
  end

  def validate_relationship
    return if list.account_id == account_id

    errors.add(:account_id, 'follow relationship missing') if follow_id.nil? && follow_request_id.nil?
    errors.add(:follow, 'mismatched accounts') if follow_id.present? && follow.target_account_id != account_id
    errors.add(:follow_request, 'mismatched accounts') if follow_request_id.present? && follow_request.target_account_id != account_id
  end
end

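Reviewer note: the net effect of the new `follow_request_id` column plus the `FollowRequest#authorize!` change above is that list membership can be recorded before the follow is accepted. A minimal sketch, assuming `list` belongs to `me` and `target` is an account with a pending follow request from `me`:

# While the follow is still pending, set_follow falls back to the FollowRequest...
membership = list.list_accounts.create!(account: target)
membership.follow_request.present? # => true

# ...and FollowRequest#authorize! later rewrites the row to point at the accepted Follow.
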
@@ -38,8 +38,8 @@ class MediaAttachment < ApplicationRecord
  LOCAL_STATUS_ATTACHMENT_MAX = 4
  ACTIVITYPUB_STATUS_ATTACHMENT_MAX = 16

  enum type: { :image => 0, :gifv => 1, :video => 2, :unknown => 3, :audio => 4 }
  enum processing: { :queued => 0, :in_progress => 1, :complete => 2, :failed => 3 }, _prefix: true
  enum type: { image: 0, gifv: 1, video: 2, unknown: 3, audio: 4 }
  enum processing: { queued: 0, in_progress: 1, complete: 2, failed: 3 }, _prefix: true

  MAX_DESCRIPTION_LENGTH = 1_500

@@ -139,7 +139,7 @@ class MediaAttachment < ApplicationRecord
    convert_options: {
      output: {
        'loglevel' => 'fatal',
        vf: 'scale=\'min(400\, iw):min(400\, ih)\':force_original_aspect_ratio=decrease',
        :vf => 'scale=\'min(400\, iw):min(400\, ih)\':force_original_aspect_ratio=decrease',
      }.freeze,
    }.freeze,
    format: 'png',

@@ -173,6 +173,8 @@ class MediaAttachment < ApplicationRecord
    original: IMAGE_STYLES[:small].freeze,
  }.freeze

  DEFAULT_STYLES = [:original].freeze

  GLOBAL_CONVERT_OPTIONS = {
    all: '-quality 90 +profile "!icc,*" +set modify-date +set create-date',
  }.freeze

@@ -280,12 +282,12 @@ class MediaAttachment < ApplicationRecord
    full_asset_url(file.url(:original))
  end

  after_commit :enqueue_processing, on: :create
  after_commit :reset_parent_cache, on: :update

  before_create :set_unknown_type
  before_create :set_processing

  after_commit :enqueue_processing, on: :create
  after_commit :reset_parent_cache, on: :update

  after_post_process :set_meta

  class << self

@@ -10,6 +10,8 @@ class RelationshipFilter
    location
  ).freeze

  IGNORED_PARAMS = %w(relationship page).freeze

  attr_reader :params, :account

  def initialize(account, params)

@@ -23,7 +25,7 @@ class RelationshipFilter
    scope = scope_for('relationship', params['relationship'].to_s.strip)

    params.each do |key, value|
      next if %w(relationship page).include?(key)
      next if IGNORED_PARAMS.include?(key)

      scope.merge!(scope_for(key.to_s, value.to_s.strip)) if value.present?
    end

@@ -36,8 +36,8 @@ class SessionActivation < ApplicationRecord
    detection.platform.id
  end

  before_create :assign_access_token
  before_save :assign_user_agent
  before_create :assign_access_token

  class << self
    def active?(id)

@@ -34,14 +34,13 @@
require 'ostruct'

class Status < ApplicationRecord
  before_destroy :unlink_from_conversations!

  include Discard::Model
  include Paginable
  include Cacheable
  include StatusThreadingConcern
  include StatusSnapshotConcern
  include RateLimitable
  include StatusSafeReblogInsert

  rate_limit by: :account, family: :statuses

@@ -123,6 +122,27 @@ class Status < ApplicationRecord
  after_create_commit :trigger_create_webhooks
  after_update_commit :trigger_update_webhooks

  after_create_commit :increment_counter_caches
  after_destroy_commit :decrement_counter_caches

  after_create_commit :store_uri, if: :local?
  after_create_commit :update_statistics, if: :local?

  before_validation :prepare_contents, if: :local?
  before_validation :set_reblog
  before_validation :set_visibility
  before_validation :set_searchability
  before_validation :set_conversation
  before_validation :set_local

  around_create Mastodon::Snowflake::Callbacks

  after_create :set_poll_id

  # The `prepend: true` option below ensures this runs before
  # the `dependent: destroy` callbacks remove relevant records
  before_destroy :unlink_from_conversations!, prepend: true

  cache_associated :application,
                   :media_attachments,
                   :conversation,

@@ -379,23 +399,6 @@ class Status < ApplicationRecord
    compute_searchability
  end

  after_create_commit :increment_counter_caches
  after_destroy_commit :decrement_counter_caches

  after_create_commit :store_uri, if: :local?
  after_create_commit :update_statistics, if: :local?

  before_validation :prepare_contents, if: :local?
  before_validation :set_reblog
  before_validation :set_visibility
  before_validation :set_searchability
  before_validation :set_conversation
  before_validation :set_local

  around_create Mastodon::Snowflake::Callbacks

  after_create :set_poll_id

  class << self
    def selectable_visibilities
      visibilities.keys - %w(direct limited)

@@ -468,71 +471,6 @@ class Status < ApplicationRecord
    super || build_status_stat
  end

  # This is a hack to ensure that no reblogs of discarded statuses are created,
  # as this cannot be enforced through database constraints the same way we do
  # for reblogs of deleted statuses.
  #
  # To achieve this, we redefine the internal method responsible for issuing
  # the "INSERT" statement and replace the "INSERT INTO ... VALUES ..." query
  # with an "INSERT INTO ... SELECT ..." query with a "WHERE deleted_at IS NULL"
  # clause on the reblogged status to ensure consistency at the database level.
  #
  # Otherwise, the code is kept as close as possible to ActiveRecord::Persistence
  # code, and actually calls it if we are not handling a reblog.
  def self._insert_record(values)
    return super unless values.is_a?(Hash) && values['reblog_of_id'].present?

    primary_key = self.primary_key
    primary_key_value = nil

    if primary_key
      primary_key_value = values[primary_key]

      if !primary_key_value && prefetch_primary_key?
        primary_key_value = next_sequence_value
        values[primary_key] = primary_key_value
      end
    end

    # The following line is where we differ from stock ActiveRecord implementation
    im = _compile_reblog_insert(values)

    # Since we are using SELECT instead of VALUES, a non-error `nil` return is possible.
    # For our purposes, it's equivalent to a foreign key constraint violation
    result = connection.insert(im, "#{self} Create", primary_key || false, primary_key_value)
    raise ActiveRecord::InvalidForeignKey, "(reblog_of_id)=(#{values['reblog_of_id']}) is not present in table \"statuses\"" if result.nil?

    result
  end

  def self._compile_reblog_insert(values)
    # This is somewhat equivalent to the following code of ActiveRecord::Persistence:
    # `arel_table.compile_insert(_substitute_values(values))`
    # The main difference is that we use a `SELECT` instead of a `VALUES` clause,
    # which means we have to build the `SELECT` clause ourselves and do a bit more
    # manual work.

    # Instead of using Arel::InsertManager#values, we are going to use Arel::InsertManager#select
    im = Arel::InsertManager.new
    im.into(arel_table)

    binds = []
    reblog_bind = nil
    values.each do |name, value|
      attr = arel_table[name]
      bind = predicate_builder.build_bind_attribute(attr.name, value)

      im.columns << attr
      binds << bind

      reblog_bind = bind if name == 'reblog_of_id'
    end

    im.select(arel_table.where(arel_table[:id].eq(reblog_bind)).where(arel_table[:deleted_at].eq(nil)).project(*binds))

    im
  end

  def discard_with_reblogs
    discard_time = Time.current
    Status.unscoped.where(reblog_of_id: id, deleted_at: [nil, deleted_at]).in_batches.update_all(deleted_at: discard_time) unless reblog?

@@ -11,7 +11,7 @@ class Trends::History
  end

  def uses
    with_redis { |redis| redis.mget(*@days.map { |day| day.key_for(:uses) }).map(&:to_i).sum }
    with_redis { |redis| redis.mget(*@days.map { |day| day.key_for(:uses) }).sum(&:to_i) }
  end

  def accounts

@@ -6,6 +6,8 @@ class Trends::PreviewCardFilter
    locale
  ).freeze

  IGNORED_PARAMS = %w(page).freeze

  attr_reader :params

  def initialize(params)

@@ -16,7 +18,7 @@ class Trends::PreviewCardFilter
    scope = initial_scope

    params.each do |key, value|
      next if %w(page).include?(key.to_s)
      next if IGNORED_PARAMS.include?(key.to_s)

      scope.merge!(scope_for(key, value.to_s.strip)) if value.present?
    end

@@ -6,6 +6,8 @@ class Trends::StatusFilter
    locale
  ).freeze

  IGNORED_PARAMS = %w(page).freeze

  attr_reader :params

  def initialize(params)

@@ -16,7 +18,7 @@ class Trends::StatusFilter
    scope = initial_scope

    params.each do |key, value|
      next if %w(page).include?(key.to_s)
      next if IGNORED_PARAMS.include?(key.to_s)

      scope.merge!(scope_for(key, value.to_s.strip)) if value.present?
    end