INSTRUCTION
stringlengths 202
35.5k
| RESPONSE
stringlengths 75
161k
|
---|---|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Topics
class MergeService
attr_accessor :source_topic, :target_topic
def initialize(source_topic, target_topic)
@source_topic = source_topic
@target_topic = target_topic
end
def execute
validate_parameters!
::Projects::ProjectTopic.transaction do
move_project_topics
refresh_target_topic_counters
delete_source_topic
end
ServiceResponse.success
rescue ArgumentError => e
ServiceResponse.error(message: e.message)
rescue StandardError => e
Gitlab::ErrorTracking.track_exception(e, source_topic_id: source_topic.id, target_topic_id: target_topic.id)
ServiceResponse.error(message: _('Topics could not be merged!'))
end
private
def validate_parameters!
raise ArgumentError, _('The source topic is not a topic.') unless source_topic.is_a?(Projects::Topic)
raise ArgumentError, _('The target topic is not a topic.') unless target_topic.is_a?(Projects::Topic)
raise ArgumentError, _('The source topic and the target topic are identical.') if source_topic == target_topic
end
# rubocop: disable CodeReuse/ActiveRecord
def move_project_topics
project_ids_for_projects_currently_using_source_and_target = ::Projects::ProjectTopic
.where(topic_id: target_topic).select(:project_id)
# Only update for projects that exclusively use the source topic
::Projects::ProjectTopic.where(topic_id: source_topic.id)
.where.not(project_id: project_ids_for_projects_currently_using_source_and_target)
.update_all(topic_id: target_topic.id)
# Delete source topic for projects that were using source and target
::Projects::ProjectTopic.where(topic_id: source_topic.id).delete_all
end
def refresh_target_topic_counters
target_topic.update!(
total_projects_count: total_projects_count(target_topic.id),
non_private_projects_count: non_private_projects_count(target_topic.id)
)
end
def delete_source_topic
source_topic.destroy!
end
def total_projects_count(topic_id)
::Projects::ProjectTopic.where(topic_id: topic_id).count
end
def non_private_projects_count(topic_id)
::Projects::ProjectTopic.joins(:project).where(topic_id: topic_id).where('projects.visibility_level in (10, 20)')
.count
end
# rubocop: enable CodeReuse/ActiveRecord
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Topics::MergeService, feature_category: :shared do
let_it_be(:source_topic) { create(:topic, name: 'source_topic') }
let_it_be(:target_topic) { create(:topic, name: 'target_topic') }
let_it_be(:project_1) { create(:project, :public, topic_list: source_topic.name) }
let_it_be(:project_2) { create(:project, :private, topic_list: source_topic.name) }
let_it_be(:project_3) { create(:project, :public, topic_list: target_topic.name) }
let_it_be(:project_4) { create(:project, :public, topic_list: [source_topic.name, target_topic.name]) }
subject { described_class.new(source_topic, target_topic).execute }
describe '#execute' do
it 'merges source topic into target topic' do
subject
expect(target_topic.projects).to contain_exactly(project_1, project_2, project_3, project_4)
expect { source_topic.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
it 'refreshes counters of target topic' do
expect { subject }
.to change { target_topic.reload.total_projects_count }.by(2)
.and change { target_topic.reload.non_private_projects_count }.by(1)
end
context 'when source topic fails to delete' do
it 'reverts previous changes' do
allow(source_topic.reload).to receive(:destroy!).and_raise(ActiveRecord::RecordNotDestroyed)
response = subject
expect(response).to be_error
expect(response.message).to eq('Topics could not be merged!')
expect(source_topic.projects).to contain_exactly(project_1, project_2, project_4)
expect(target_topic.projects).to contain_exactly(project_3, project_4)
end
end
context 'for parameter validation' do
using RSpec::Parameterized::TableSyntax
subject { described_class.new(source_topic_parameter, target_topic_parameter).execute }
where(:source_topic_parameter, :target_topic_parameter, :expected_message) do
nil | ref(:target_topic) | 'The source topic is not a topic.'
ref(:source_topic) | nil | 'The target topic is not a topic.'
ref(:target_topic) | ref(:target_topic) | 'The source topic and the target topic are identical.'
end
with_them do
it 'raises correct error' do
response = subject
expect(response).to be_error
expect(response.message).to eq(expected_message)
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Analytics
module CycleAnalytics
module Stages
class ListService < Analytics::CycleAnalytics::Stages::BaseService
def execute
return forbidden unless allowed?
success(build_default_stages)
end
private
def allowed?
can?(current_user, :read_cycle_analytics, parent.project)
end
def success(stages)
ServiceResponse.success(payload: { stages: stages })
end
end
end
end
end
Analytics::CycleAnalytics::Stages::ListService.prepend_mod_with('Analytics::CycleAnalytics::Stages::ListService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Analytics::CycleAnalytics::Stages::ListService, feature_category: :value_stream_management do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:project_namespace) { project.project_namespace.reload }
let(:value_stream) { Analytics::CycleAnalytics::ValueStream.build_default_value_stream(project_namespace) }
let(:stages) { subject.payload[:stages] }
subject do
described_class.new(parent: project_namespace, current_user: user, params: { value_stream: value_stream }).execute
end
before_all do
project.add_reporter(user)
end
it 'returns only the default stages' do
expect(stages.size).to eq(Gitlab::Analytics::CycleAnalytics::DefaultStages.all.size)
end
it 'provides the default stages as non-persisted objects' do
expect(stages.map(&:id)).to all(be_nil)
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Deployments
# Service class for linking merge requests to deployments.
class LinkMergeRequestsService
attr_reader :deployment
# The number of commits per query for which to find merge requests.
COMMITS_PER_QUERY = 5_000
def initialize(deployment)
@deployment = deployment
end
def execute
# Review apps have the environment type set (e.g. to `review`, though the
# exact value may differ). We don't want to link merge requests to review
# app deployments, as this is not useful.
return unless deployment.environment.should_link_to_merge_requests?
# This service is triggered by a Sidekiq worker, which only runs when a
# deployment is successful. We add an extra check here in case we ever
# call this service elsewhere and forget to check the status there.
#
# The reason we only want to link successful deployments is as follows:
# when we link a merge request, we don't link it to future deployments for
# the same environment. If we were to link an MR to a failed deploy, we
# wouldn't be able to later on link it to a successful deploy (e.g. after
# the deploy is retried).
#
# In addition, showing failed deploys in the UI of a merge request isn't
# useful to users, as they can't act upon the information in any
# meaningful way (i.e. they can't just retry the deploy themselves).
return unless deployment.success?
if (prev = deployment.previous_deployment)
link_merge_requests_for_range(prev.sha, deployment.sha)
else
# When no previous deployment is found we fall back to linking all merge
# requests merged into the deployed branch. This will not always be
# accurate, but it's better than having no data.
#
# We can't use the first commit in the repository as a base to compare
# to, as this will not scale to large repositories. For example, GitLab
# itself has over 150 000 commits.
link_all_merged_merge_requests
end
end
def link_merge_requests_for_range(from, to)
commits = project
.repository
.commits_between(from, to)
.map(&:id)
# For some projects the list of commits to deploy may be very large. To
# ensure we do not end up running SQL queries with thousands of WHERE IN
# values, we run one query per a certain number of commits.
#
# In most cases this translates to only a single query. For very large
# deployment we may end up running a handful of queries to get and insert
# the data.
commits.each_slice(COMMITS_PER_QUERY) do |slice|
merge_requests =
project.merge_requests.merged.by_merge_commit_sha(slice)
deployment.link_merge_requests(merge_requests)
# The cherry picked commits are tracked via `notes.commit_id`
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22209
#
# NOTE: cross-joining `merge_requests` table and `notes` table could
# result in very poor performance because PG planner often uses an
# inappropriate index.
# See https://gitlab.com/gitlab-org/gitlab/-/issues/321032.
mr_ids = project.notes.cherry_picked_merge_requests(slice)
picked_merge_requests = project.merge_requests.id_in(mr_ids)
deployment.link_merge_requests(picked_merge_requests)
end
end
def link_all_merged_merge_requests
merge_requests =
project.merge_requests.merged.by_target_branch(deployment.ref)
deployment.link_merge_requests(merge_requests)
end
private
def project
deployment.project
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Deployments::LinkMergeRequestsService, feature_category: :continuous_delivery do
let(:project) { create(:project, :repository) }
# * ddd0f15 Merge branch 'po-fix-test-env-path' into 'master'
# |\
# | * 2d1db52 Correct test_env.rb path for adding branch
# |/
# * 1e292f8 Merge branch 'cherry-pick-ce369011' into 'master'
# |\
# | * c1c67ab Add file with a _flattable_ path
# |/
# * 7975be0 Merge branch 'rd-add-file-larger-than-1-mb' into 'master'
let_it_be(:first_deployment_sha) { '7975be0116940bf2ad4321f79d02a55c5f7779aa' }
let_it_be(:mr1_merge_commit_sha) { '1e292f8fedd741b75372e19097c76d327140c312' }
let_it_be(:mr2_merge_commit_sha) { 'ddd0f15ae83993f5cb66a927a28673882e99100b' }
describe '#execute' do
context 'when the deployment is for a review environment' do
it 'does nothing' do
environment =
create(:environment, environment_type: 'review', name: 'review/foo')
deploy = create(:deployment, :success, environment: environment)
expect(deploy).not_to receive(:link_merge_requests)
described_class.new(deploy).execute
end
end
context 'when the deployment is for one of the production environments' do
it 'links merge requests' do
environment =
create(:environment, environment_type: 'production', name: 'production/gcp')
deploy = create(:deployment, :success, environment: environment)
expect(deploy).to receive(:link_merge_requests).once
described_class.new(deploy).execute
end
end
context 'when the deployment failed' do
it 'does nothing' do
environment = create(:environment, name: 'foo')
deploy = create(:deployment, :failed, environment: environment)
expect(deploy).not_to receive(:link_merge_requests)
described_class.new(deploy).execute
end
end
context 'when there is a previous deployment' do
it 'links all merge requests merged since the previous deployment' do
deploy1 = create(
:deployment,
:success,
project: project,
sha: first_deployment_sha
)
deploy2 = create(
:deployment,
:success,
project: deploy1.project,
environment: deploy1.environment,
sha: mr2_merge_commit_sha
)
service = described_class.new(deploy2)
expect(service)
.to receive(:link_merge_requests_for_range)
.with(first_deployment_sha, mr2_merge_commit_sha)
service.execute
end
end
context 'when there are no previous deployments' do
it 'links all merged merge requests' do
deploy = create(:deployment, :success, project: project)
service = described_class.new(deploy)
expect(service).to receive(:link_all_merged_merge_requests)
service.execute
end
end
end
describe '#link_merge_requests_for_range' do
it 'links merge requests' do
environment = create(:environment, project: project)
deploy =
create(:deployment, :success, project: project, environment: environment)
mr1 = create(
:merge_request,
:merged,
merge_commit_sha: mr1_merge_commit_sha,
source_project: project,
target_project: project
)
mr2 = create(
:merge_request,
:merged,
merge_commit_sha: mr2_merge_commit_sha,
source_project: project,
target_project: project
)
described_class.new(deploy).link_merge_requests_for_range(
first_deployment_sha,
mr2_merge_commit_sha
)
expect(deploy.merge_requests).to include(mr1, mr2)
end
it 'links picked merge requests' do
environment = create(:environment, project: project)
deploy =
create(:deployment, :success, project: project, environment: environment)
picked_mr = create(
:merge_request,
:merged,
merge_commit_sha: '123abc',
source_project: project,
target_project: project
)
mr1 = create(
:merge_request,
:merged,
merge_commit_sha: mr1_merge_commit_sha,
source_project: project,
target_project: project
)
# mr1 includes c1c67abba which is a cherry-pick of the fake picked_mr merge request
create(:track_mr_picking_note, noteable: picked_mr, project: project, commit_id: 'c1c67abbaf91f624347bb3ae96eabe3a1b742478')
described_class.new(deploy).link_merge_requests_for_range(
first_deployment_sha,
mr1_merge_commit_sha
)
expect(deploy.merge_requests).to include(mr1, picked_mr)
end
it "doesn't link the same merge_request twice" do
create(:merge_request, :merged, merge_commit_sha: mr1_merge_commit_sha, source_project: project)
picked_mr = create(:merge_request, :merged, merge_commit_sha: '123abc', source_project: project)
# the first MR includes c1c67abba which is a cherry-pick of the fake picked_mr merge request
create(:track_mr_picking_note, noteable: picked_mr, project: project, commit_id: 'c1c67abbaf91f624347bb3ae96eabe3a1b742478')
environment = create(:environment, project: project)
old_deploy =
create(:deployment, :success, project: project, environment: environment)
# manually linking all the MRs to the old_deploy
old_deploy.link_merge_requests(project.merge_requests)
deploy =
create(:deployment, :success, project: project, environment: environment)
described_class.new(deploy).link_merge_requests_for_range(
first_deployment_sha,
mr1_merge_commit_sha
)
expect(deploy.merge_requests).to be_empty
end
end
describe '#link_all_merged_merge_requests' do
it 'links all merged merge requests targeting the deployed branch' do
environment = create(:environment, project: project)
deploy =
create(:deployment, :success, project: project, environment: environment)
mr1 = create(
:merge_request,
:merged,
source_project: project,
target_project: project,
source_branch: 'source1',
target_branch: deploy.ref
)
mr2 = create(
:merge_request,
:merged,
source_project: project,
target_project: project,
source_branch: 'source2',
target_branch: deploy.ref
)
mr3 = create(
:merge_request,
:merged,
source_project: project,
target_project: project,
target_branch: 'foo'
)
described_class.new(deploy).link_all_merged_merge_requests
expect(deploy.merge_requests).to include(mr1, mr2)
expect(deploy.merge_requests).not_to include(mr3)
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Deployments
# This service archives old deploymets and deletes deployment refs for
# keeping the project repository performant.
class ArchiveInProjectService < ::BaseService
BATCH_SIZE = 100
def execute
deployments = Deployment.archivables_in(project, limit: BATCH_SIZE)
return success(result: :empty) if deployments.empty?
ids = deployments.map(&:id)
ref_paths = deployments.map(&:ref_path)
project.repository.delete_refs(*ref_paths)
project.deployments.id_in(ids).update_all(archived: true)
success(result: :archived, count: ids.count)
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Deployments::ArchiveInProjectService, feature_category: :continuous_delivery do
let_it_be(:project) { create(:project, :repository) }
let(:service) { described_class.new(project, nil) }
describe '#execute' do
subject { service.execute }
context 'when there are archivable deployments' do
let!(:deployments) { create_list(:deployment, 3, project: project) }
let!(:deployment_refs) { deployments.map(&:ref_path) }
before do
deployments.each(&:create_ref)
allow(Deployment).to receive(:archivables_in) { deployments }
end
it 'returns result code' do
expect(subject[:result]).to eq(:archived)
expect(subject[:status]).to eq(:success)
expect(subject[:count]).to eq(3)
end
it 'archives the deployment' do
expect(deployments.map(&:archived?)).to be_all(false)
expect(deployment_refs_exist?).to be_all(true)
subject
deployments.each(&:reload)
expect(deployments.map(&:archived?)).to be_all(true)
expect(deployment_refs_exist?).to be_all(false)
end
context 'when ref does not exist by some reason' do
before do
project.repository.delete_refs(*deployment_refs)
end
it 'does not raise an error' do
expect(deployment_refs_exist?).to be_all(false)
expect { subject }.not_to raise_error
expect(deployment_refs_exist?).to be_all(false)
end
end
def deployment_refs_exist?
deployment_refs.map { |path| project.repository.ref_exists?(path) }
end
end
context 'when there are no archivable deployments' do
before do
allow(Deployment).to receive(:archivables_in) { Deployment.none }
end
it 'returns result code' do
expect(subject[:result]).to eq(:empty)
expect(subject[:status]).to eq(:success)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Deployments
# This class creates a deployment record for a pipeline job.
class CreateForJobService
DeploymentCreationError = Class.new(StandardError)
def execute(job)
return unless job.is_a?(::Ci::Processable) && job.persisted_environment.present?
environment = job.actual_persisted_environment
deployment = to_resource(job, environment)
return unless deployment
deployment.save!
job.association(:deployment).target = deployment
job.association(:deployment).loaded!
deployment
rescue ActiveRecord::RecordInvalid => e
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(
DeploymentCreationError.new(e.message), job_id: job.id)
end
private
def to_resource(job, environment)
return job.deployment if job.deployment
return unless job.deployment_job?
deployment = ::Deployment.new(attributes(job, environment))
# If there is a validation error on environment creation, such as
# the name contains invalid character, the job will fall back to a
# non-environment job.
return unless deployment.valid? && deployment.environment.persisted?
if cluster = deployment.environment.deployment_platform&.cluster # rubocop: disable Lint/AssignmentInCondition
deployment.deployment_cluster = ::DeploymentCluster.new(
cluster_id: cluster.id,
kubernetes_namespace: cluster.kubernetes_namespace_for(deployment.environment, deployable: job)
)
end
# Allocate IID for deployments.
# This operation must be outside of transactions of pipeline creations.
deployment.ensure_project_iid!
deployment
end
def attributes(job, environment)
{
project: job.project,
environment: environment,
deployable: job,
user: job.user,
ref: job.ref,
tag: job.tag,
sha: job.sha,
on_stop: job.on_stop
}
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Deployments::CreateForJobService, feature_category: :continuous_delivery do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:service) { described_class.new }
it_behaves_like 'create deployment for job' do
let(:factory_type) { :ci_build }
end
it_behaves_like 'create deployment for job' do
let(:factory_type) { :ci_bridge }
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
module Deployments
class UpdateEnvironmentService
attr_reader :deployment
attr_reader :deployable
delegate :environment, to: :deployment
delegate :variables, to: :deployable
delegate :options, to: :deployable, allow_nil: true
EnvironmentUpdateFailure = Class.new(StandardError)
def initialize(deployment)
@deployment = deployment
@deployable = deployment.deployable
end
def execute
deployment.create_ref
deployment.invalidate_cache
update_environment(deployment)
deployment
end
def update_environment(deployment)
ApplicationRecord.transaction do
# Renew attributes at update
renew_external_url
renew_auto_stop_in
renew_deployment_tier
environment.fire_state_event(action)
if environment.save
deployment.update_merge_request_metrics! unless environment.stopped?
else
# If there is a validation error on environment update, such as
# the external URL is malformed, the error message is recorded for debugging purpose.
# We should surface the error message to users for letting them to take an action.
# See https://gitlab.com/gitlab-org/gitlab/-/issues/21182.
Gitlab::ErrorTracking.track_exception(
EnvironmentUpdateFailure.new,
project_id: deployment.project_id,
environment_id: environment.id,
reason: environment.errors.full_messages.to_sentence)
end
end
end
private
def environment_options
options&.dig(:environment) || {}
end
def expanded_environment_url
return unless environment_url
ExpandVariables.expand(environment_url, -> { variables.sort_and_expand_all })
end
def expanded_auto_stop_in
return unless auto_stop_in
ExpandVariables.expand(auto_stop_in, -> { variables.sort_and_expand_all })
end
def environment_url
environment_options[:url]
end
def action
environment_options[:action] || 'start'
end
def auto_stop_in
deployable&.environment_auto_stop_in
end
def renew_external_url
if (url = expanded_environment_url)
environment.external_url = url
end
end
def renew_auto_stop_in
return unless deployable
if (value = expanded_auto_stop_in)
environment.auto_stop_in = value
end
end
def renew_deployment_tier
return unless deployable
if (tier = deployable.environment_tier_from_options)
environment.tier = tier
end
end
end
end
Deployments::UpdateEnvironmentService.prepend_mod_with('Deployments::UpdateEnvironmentService')
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Deployments::UpdateEnvironmentService, feature_category: :continuous_delivery do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:options) { { name: environment_name } }
let(:pipeline) do
create(
:ci_pipeline,
sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
project: project
)
end
let(:job) do
create(:ci_build,
:with_deployment,
pipeline: pipeline,
ref: 'master',
tag: false,
environment: environment_name,
options: { environment: options },
project: project)
end
let(:deployment) { job.deployment }
let(:environment) { deployment.environment }
let(:environment_name) { 'production' }
subject(:service) { described_class.new(deployment) }
before do
allow(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
allow(Deployments::HooksWorker).to receive(:perform_async)
job.success! # Create/Succeed deployment
end
describe '#execute' do
let(:store) { Gitlab::EtagCaching::Store.new }
it 'invalidates the environment etag cache' do
old_value = store.get(environment.etag_cache_key)
service.execute
expect(store.get(environment.etag_cache_key)).not_to eq(old_value)
end
it 'creates ref' do
expect_any_instance_of(Repository)
.to receive(:create_ref)
.with(deployment.sha, "refs/environments/production/deployments/#{deployment.iid}")
service.execute
end
it 'updates merge request metrics' do
expect_any_instance_of(Deployment)
.to receive(:update_merge_request_metrics!)
service.execute
end
it 'returns the deployment' do
expect(subject.execute).to eq(deployment)
end
it 'returns the deployment when could not save the environment' do
allow(environment).to receive(:save).and_return(false)
expect(subject.execute).to eq(deployment)
end
it 'returns the deployment when environment is stopped' do
allow(environment).to receive(:stopped?).and_return(true)
expect(subject.execute).to eq(deployment)
end
context 'when deployable is bridge job' do
let(:job) do
create(:ci_bridge,
:with_deployment,
pipeline: pipeline,
ref: 'master',
tag: false,
environment: environment_name,
options: { environment: options },
project: project)
end
it 'creates ref' do
expect_any_instance_of(Repository)
.to receive(:create_ref)
.with(deployment.sha, "refs/environments/production/deployments/#{deployment.iid}")
service.execute
end
end
context 'when start action is defined' do
let(:options) { { name: 'production', action: 'start' } }
context 'and environment is stopped' do
before do
environment.stop_complete
end
it 'makes environment available' do
service.execute
expect(environment.reload).to be_available
end
end
end
context 'when external URL is specified and the tier is unset' do
let(:options) { { name: 'production', url: external_url } }
before do
environment.update_columns(external_url: external_url, tier: nil)
job.update!(environment: 'production')
end
context 'when external URL is valid' do
let(:external_url) { 'https://google.com' }
it 'succeeds to update the tier automatically' do
expect { subject.execute }.to change { environment.tier }.from(nil).to('production')
end
end
context 'when external URL is invalid' do
let(:external_url) { 'javascript:alert("hello")' }
it 'fails to update the tier due to validation error' do
expect { subject.execute }.not_to change { environment.reload.tier }
end
it 'tracks an exception' do
expect(Gitlab::ErrorTracking).to receive(:track_exception)
.with(
an_instance_of(described_class::EnvironmentUpdateFailure),
project_id: project.id,
environment_id: environment.id,
reason: %q(External url javascript scheme is not allowed)
)
.once
subject.execute
end
end
end
context 'when variables are used' do
let(:options) do
{ name: 'review-apps/$CI_COMMIT_REF_NAME',
url: 'http://$CI_COMMIT_REF_NAME.review-apps.gitlab.com' }
end
before do
environment.update!(name: 'review-apps/master')
job.update!(environment: 'review-apps/$CI_COMMIT_REF_NAME')
end
it 'does not create a new environment' do
expect { subject.execute }.not_to change { Environment.count }
end
it 'updates external url' do
subject.execute
expect(subject.environment.name).to eq('review-apps/master')
expect(subject.environment.external_url).to eq('http://master.review-apps.gitlab.com')
end
end
context 'when auto_stop_in are used' do
let(:options) do
{ name: 'production', auto_stop_in: '1 day' }
end
before do
environment.update_attribute(:auto_stop_at, nil)
end
it 'renews auto stop at' do
freeze_time do
expect { subject.execute }
.to change { environment.reset.auto_stop_at&.round }.from(nil).to(1.day.since.round)
end
end
context 'when value is a variable' do
let(:options) { { name: 'production', auto_stop_in: '$TTL' } }
let(:yaml_variables) do
[
{ key: "TTL", value: '2 days', public: true }
]
end
before do
job.update_attribute(:yaml_variables, yaml_variables)
end
it 'renews auto stop at with expanded variable value' do
freeze_time do
expect { subject.execute }
.to change { environment.reset.auto_stop_at&.round }.from(nil).to(2.days.since.round)
end
end
end
end
context 'when deployment tier is specified' do
let(:environment_name) { 'customer-portal' }
let(:options) { { name: environment_name, deployment_tier: 'production' } }
context 'when tier has already been set' do
before do
environment.update_column(:tier, Environment.tiers[:other])
end
it 'overwrites the guessed tier by the specified deployment tier' do
expect { subject.execute }
.to change { environment.reset.tier }.from('other').to('production')
end
end
context 'when tier has not been set' do
before do
environment.update_column(:tier, nil)
end
it 'sets the specified deployment tier' do
expect { subject.execute }
.to change { environment.reset.tier }.from(nil).to('production')
end
context 'when deployment was created by an external CD system' do
before do
deployment.update_column(:deployable_id, nil)
deployment.reload
end
it 'guesses the deployment tier' do
expect { subject.execute }
.to change { environment.reset.tier }.from(nil).to('other')
end
end
end
end
context 'when deployment tier is not specified' do
let(:environment_name) { 'customer-portal' }
let(:options) { { name: environment_name } }
it 'guesses the deployment tier' do
environment.update_column(:tier, nil)
expect { subject.execute }
.to change { environment.reset.tier }.from(nil).to('other')
end
end
end
describe '#expanded_environment_url' do
subject { service.send(:expanded_environment_url) }
context 'when yaml environment uses $CI_COMMIT_REF_NAME' do
let(:job) do
create(
:ci_build,
:with_deployment,
pipeline: pipeline,
ref: 'master',
environment: 'production',
project: project,
options: { environment: { name: 'production', url: 'http://review/$CI_COMMIT_REF_NAME' } }
)
end
it { is_expected.to eq('http://review/master') }
end
context 'when yaml environment uses $CI_ENVIRONMENT_SLUG' do
let(:job) do
create(
:ci_build,
:with_deployment,
pipeline: pipeline,
ref: 'master',
environment: 'prod-slug',
project: project,
options: { environment: { name: 'prod-slug', url: 'http://review/$CI_ENVIRONMENT_SLUG' } }
)
end
it { is_expected.to eq('http://review/prod-slug') }
end
context 'when yaml environment uses yaml_variables containing symbol keys' do
let(:job) do
create(
:ci_build,
:with_deployment,
pipeline: pipeline,
yaml_variables: [{ key: :APP_HOST, value: 'host' }],
environment: 'production',
project: project,
options: { environment: { name: 'production', url: 'http://review/$APP_HOST' } }
)
end
it { is_expected.to eq('http://review/host') }
end
context 'when job variables are generated during runtime' do
let(:job) do
create(
:ci_build,
:with_deployment,
pipeline: pipeline,
environment: 'review/$CI_COMMIT_REF_NAME',
project: project,
job_variables: [job_variable],
options: { environment: { name: 'review/$CI_COMMIT_REF_NAME', url: 'http://$DYNAMIC_ENV_URL' } }
)
end
let(:job_variable) do
build(:ci_job_variable, :dotenv_source, key: 'DYNAMIC_ENV_URL', value: 'abc.test.com')
end
it 'expands the environment URL from the dynamic variable' do
is_expected.to eq('http://abc.test.com')
end
end
context 'when environment url uses a nested variable' do
let(:yaml_variables) do
[
{ key: 'MAIN_DOMAIN', value: '${STACK_NAME}.example.com' },
{ key: 'STACK_NAME', value: 'appname-${ENVIRONMENT_NAME}' },
{ key: 'ENVIRONMENT_NAME', value: '${CI_COMMIT_REF_SLUG}' }
]
end
let(:job) do
create(
:ci_build,
:with_deployment,
pipeline: pipeline,
ref: 'master',
environment: 'production',
project: project,
yaml_variables: yaml_variables,
options: { environment: { name: 'production', url: 'http://$MAIN_DOMAIN' } }
)
end
it { is_expected.to eq('http://appname-master.example.com') }
end
context 'when yaml environment does not have url' do
let(:job) { create(:ci_build, :with_deployment, pipeline: pipeline, environment: 'staging', project: project) }
it 'returns the external_url from persisted environment' do
is_expected.to be_nil
end
end
end
describe "merge request metrics" do
let(:merge_request) { create(:merge_request, target_branch: 'master', source_branch: 'feature', source_project: project) }
context "while updating the 'first_deployed_to_production_at' time" do
before do
merge_request.metrics.update!(merged_at: 1.hour.ago)
end
context "for merge requests merged before the current deploy" do
it "sets the time if the deploy's environment is 'production'" do
service.execute
expect(merge_request.reload.metrics.first_deployed_to_production_at).to be_like_time(deployment.finished_at)
end
context 'when job deploys to staging' do
let(:job) do
create(:ci_build,
:with_deployment,
pipeline: pipeline,
ref: 'master',
tag: false,
environment: 'staging',
options: { environment: { name: 'staging' } },
project: project)
end
it "doesn't set the time if the deploy's environment is not 'production'" do
service.execute
expect(merge_request.reload.metrics.first_deployed_to_production_at).to be_nil
end
end
it 'does not raise errors if the merge request does not have a metrics record' do
merge_request.metrics.destroy!
expect(merge_request.reload.metrics).to be_nil
expect { service.execute }.not_to raise_error
end
end
context "for merge requests merged before the previous deploy" do
context "if the 'first_deployed_to_production_at' time is already set" do
it "does not overwrite the older 'first_deployed_to_production_at' time" do
# Previous deploy
service.execute
expect(merge_request.reload.metrics.first_deployed_to_production_at).to be_like_time(deployment.finished_at)
# Current deploy
travel_to(12.hours.from_now) do
service.execute
expect(merge_request.reload.metrics.first_deployed_to_production_at).to be_like_time(deployment.finished_at)
end
end
end
context "if the 'first_deployed_to_production_at' time is not already set" do
it "does not overwrite the older 'first_deployed_to_production_at' time" do
# Previous deploy
time = 5.minutes.from_now
travel_to(time) { service.execute }
expect(merge_request.reload.metrics.merged_at).to be < merge_request.reload.metrics.first_deployed_to_production_at
previous_time = merge_request.reload.metrics.first_deployed_to_production_at
# Current deploy
travel_to(time + 12.hours) { service.execute }
expect(merge_request.reload.metrics.first_deployed_to_production_at).to eq(previous_time)
end
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true

module Deployments
  # Transitions an existing deployment to a new status.
  class UpdateService
    attr_reader :deployment, :params

    # @param deployment [Deployment] deployment whose status will change
    # @param params [Hash] options hash; only :status is read
    def initialize(deployment, params)
      @deployment = deployment
      @params = params
    end

    # Applies the requested status via Deployment#update_status and returns
    # whatever that call returns (truthy on success).
    def execute
      requested_status = params[:status]

      deployment.update_status(requested_status)
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Verifies that Deployments::UpdateService#execute transitions a deployment
# through each supported status and rejects unknown status values.
RSpec.describe Deployments::UpdateService, feature_category: :continuous_delivery do
  let(:deploy) { create(:deployment) }

  describe '#execute' do
    it 'can update the status to running' do
      expect(described_class.new(deploy, status: 'running').execute)
        .to be_truthy

      expect(deploy).to be_running
    end

    it 'can update the status to success' do
      expect(described_class.new(deploy, status: 'success').execute)
        .to be_truthy

      expect(deploy).to be_success
    end

    it 'can update the status to failed' do
      expect(described_class.new(deploy, status: 'failed').execute)
        .to be_truthy

      expect(deploy).to be_failed
    end

    it 'can update the status to canceled' do
      expect(described_class.new(deploy, status: 'canceled').execute)
        .to be_truthy

      expect(deploy).to be_canceled
    end

    # Unknown statuses must leave the deployment in its initial state.
    it 'does not change the state if the status is invalid' do
      expect(described_class.new(deploy, status: 'kittens').execute)
        .to be_falsy

      expect(deploy).to be_created
    end

    it 'links merge requests when changing the status to success', :sidekiq_inline do
      mr = create(
        :merge_request,
        :merged,
        target_project: deploy.project,
        source_project: deploy.project,
        target_branch: 'master',
        source_branch: 'foo'
      )

      described_class.new(deploy, status: 'success').execute

      expect(deploy.merge_requests).to eq([mr])
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module Deployments
  # Builds (and optionally transitions) a deployment for an environment.
  class CreateService
    attr_reader :environment, :current_user, :params

    def initialize(environment, current_user, params)
      @environment = environment
      @current_user = current_user
      @params = params
    end

    # Returns the environment's last deployment when it already matches
    # +params+; otherwise builds a new deployment record, applies the
    # requested status (or persists the record unchanged) and returns it.
    def execute
      return last_deployment if last_deployment&.equal_to?(params)

      deployment = environment.deployments.build(deployment_attributes)

      # Deployment#update_status already saves the model, so we only need to
      # call #save ourselves if no status is provided.
      status = params[:status]
      if status
        deployment.update_status(status)
      else
        deployment.save
      end

      deployment
    end

    # We use explicit parameters here so we never by accident allow parameters
    # to be set that one should not be able to set (e.g. the row ID).
    def deployment_attributes
      {
        project_id: environment.project_id,
        environment_id: environment.id,
        ref: params[:ref],
        tag: params[:tag],
        sha: params[:sha],
        user: current_user,
        on_stop: params[:on_stop]
      }
    end

    private

    def last_deployment
      @environment.last_deployment
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Verifies Deployments::CreateService: building deployments, optional status
# transitions, de-duplication of identical deployments, and the attribute
# allow-list returned by #deployment_attributes.
RSpec.describe Deployments::CreateService, feature_category: :continuous_delivery do
  let(:user) { create(:user) }

  describe '#execute' do
    let(:project) { create(:project, :repository) }
    let(:environment) { create(:environment, project: project) }

    it 'creates a deployment' do
      service = described_class.new(
        environment,
        user,
        sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
        ref: 'master',
        tag: false,
        status: 'success'
      )

      # A status transition fans out to the deployment follow-up workers.
      expect(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
      expect(Deployments::LinkMergeRequestWorker).to receive(:perform_async)
      expect(Deployments::HooksWorker).to receive(:perform_async)

      expect(service.execute).to be_persisted
    end

    it 'does not change the status if no status is given' do
      service = described_class.new(
        environment,
        user,
        sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
        ref: 'master',
        tag: false
      )

      # Without a status there is no transition, so no workers are enqueued.
      expect(Deployments::UpdateEnvironmentWorker).not_to receive(:perform_async)
      expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
      expect(Deployments::HooksWorker).not_to receive(:perform_async)

      expect(service.execute).to be_persisted
    end

    context 'when the last deployment has the same parameters' do
      let(:params) do
        {
          sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
          ref: 'master',
          tag: false,
          status: 'success'
        }
      end

      # The second, identical call must short-circuit and enqueue nothing.
      it 'does not create a new deployment' do
        described_class.new(environment, user, params).execute

        expect(Deployments::UpdateEnvironmentWorker).not_to receive(:perform_async)
        expect(Deployments::LinkMergeRequestWorker).not_to receive(:perform_async)
        expect(Deployments::HooksWorker).not_to receive(:perform_async)

        described_class.new(environment.reload, user, params).execute
      end
    end
  end

  describe '#deployment_attributes' do
    let(:environment) do
      double(
        :environment,
        deployment_platform: double(:platform, cluster_id: 1),
        project_id: 2,
        id: 3
      )
    end

    it 'only includes attributes that we want to persist' do
      service = described_class.new(
        environment,
        user,
        ref: 'master',
        tag: true,
        sha: '123',
        foo: 'bar',
        on_stop: 'stop'
      )

      # :foo must be dropped — only allow-listed attributes are returned.
      expect(service.deployment_attributes).to eq(
        project_id: 2,
        environment_id: 3,
        ref: 'master',
        tag: true,
        sha: '123',
        user: user,
        on_stop: 'stop'
      )
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module BulkImports
  # Exports a portable's (project or group) uploads to disk so they can be
  # packaged into a bulk-import archive.
  #
  # Files are written under `export_path`, grouped per-upload into a
  # subdirectory named after the upload's secret ('avatar' for the portable's
  # avatar upload).
  class UploadsExportService
    include Gitlab::ImportExport::CommandLineUtil

    BATCH_SIZE = 100
    AVATAR_PATH = 'avatar'

    # Number of uploads successfully written during #execute.
    attr_reader :exported_objects_count

    def initialize(portable, export_path)
      @portable = portable
      @export_path = export_path
      @exported_objects_count = 0
    end

    # @param options [Hash] when :batch_ids is present, only uploads with
    #   those primary keys are exported (used by batched exports).
    def execute(options = {})
      relation = portable.uploads

      if options[:batch_ids]
        relation = relation.where(relation.model.primary_key => options[:batch_ids]) # rubocop:disable CodeReuse/ActiveRecord
      end

      relation.find_each(batch_size: BATCH_SIZE) do |upload| # rubocop: disable CodeReuse/ActiveRecord
        uploader = upload.retrieve_uploader

        # Skip DB rows whose backing file is gone.
        next unless upload.exist?
        next unless uploader.file

        subdir_path = export_subdir_path(upload)
        mkdir_p(subdir_path)
        download_or_copy_upload(uploader, File.join(subdir_path, uploader.filename))
        @exported_objects_count += 1
      rescue StandardError => e
        # Do not fail entire project export if something goes wrong during file download
        # (e.g. downloaded file has filename that exceeds 255 characters).
        # Ignore raised exception, skip such upload, log the error and keep going with the export instead.
        Gitlab::ErrorTracking.log_exception(e, portable_id: portable.id, portable_class: portable.class.name, upload_id: upload.id)
      end
    end

    private

    attr_reader :portable, :export_path

    # Avatar uploads go to the fixed 'avatar' directory; everything else is
    # namespaced by the upload's secret to avoid filename collisions.
    def export_subdir_path(upload)
      subdir = if upload.path == avatar_path
                 AVATAR_PATH
               else
                 upload.try(:secret).to_s
               end

      File.join(export_path, subdir)
    end

    def avatar_path
      @avatar_path ||= portable.avatar&.upload&.path
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Verifies BulkImports::UploadsExportService: exporting uploads and avatar,
# batched exports, and resilience to missing or failing files.
RSpec.describe BulkImports::UploadsExportService, feature_category: :importers do
  let(:export_path) { Dir.mktmpdir }
  let(:project) { create(:project, avatar: fixture_file_upload('spec/fixtures/rails_sample.png', 'image/png')) }
  let!(:upload) { create(:upload, :with_file, :issuable_upload, uploader: FileUploader, model: project) }
  let(:exported_filepath) { File.join(export_path, upload.secret, upload.retrieve_uploader.filename) }

  subject(:service) { described_class.new(project, export_path) }

  after do
    FileUtils.remove_entry(export_path) if Dir.exist?(export_path)
  end

  describe '#execute' do
    it 'exports project uploads and avatar' do
      service.execute

      expect(File).to exist(File.join(export_path, 'avatar', 'rails_sample.png'))
      expect(File).to exist(exported_filepath)
    end

    context 'when export is batched' do
      it 'exports only specified uploads' do
        service.execute(batch_ids: [upload.id])

        expect(service.exported_objects_count).to eq(1)
        expect(File).not_to exist(File.join(export_path, 'avatar', 'rails_sample.png'))
        expect(File).to exist(exported_filepath)
      end
    end

    context 'when upload has underlying file missing' do
      context 'with an upload missing its file' do
        it 'does not cause errors' do
          File.delete(upload.absolute_path)

          expect { service.execute }.not_to raise_error

          expect(File).not_to exist(exported_filepath)
        end
      end

      context 'when upload is in object storage' do
        before do
          stub_uploads_object_storage(FileUploader)
        end

        # Failures for individual uploads are logged and skipped, never raised.
        shared_examples 'export with invalid upload' do
          it 'ignores problematic upload and logs exception' do
            allow(service).to receive(:download_or_copy_upload).and_raise(exception)

            expect(Gitlab::ErrorTracking)
              .to receive(:log_exception)
              .with(
                instance_of(exception), {
                  portable_id: project.id,
                  portable_class: 'Project',
                  upload_id: upload.id
                }
              )

            expect(Gitlab::ErrorTracking)
              .to receive(:log_exception)
              .with(
                instance_of(exception), {
                  portable_id: project.id,
                  portable_class: 'Project',
                  upload_id: project.avatar.upload.id
                }
              )

            service.execute

            expect(File).not_to exist(exported_filepath)
          end
        end

        context 'when filename is too long' do
          let(:exception) { Errno::ENAMETOOLONG }

          include_examples 'export with invalid upload'
        end

        context 'when network exception occurs' do
          let(:exception) { Net::OpenTimeout }

          include_examples 'export with invalid upload'
        end
      end
    end
  end

  describe '#exported_objects_count' do
    it 'return the number of exported uploads' do
      service.execute

      # One issuable upload plus the project avatar.
      expect(service.exported_objects_count).to eq(2)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module BulkImports
  # Exports a project's LFS objects to disk together with a JSON file mapping
  # each object OID to the repository types that reference it.
  class LfsObjectsExportService
    include Gitlab::ImportExport::CommandLineUtil

    BATCH_SIZE = 100

    # Number of LFS objects written during #execute.
    attr_reader :exported_objects_count

    def initialize(portable, export_path)
      @portable = portable
      @export_path = export_path
      @lfs_json = {}
      @exported_objects_count = 0
    end

    # @param options [Hash] when :batch_ids is present, only LFS objects with
    #   those primary keys are exported (used by batched exports).
    def execute(options = {})
      relation = portable.lfs_objects

      if options[:batch_ids]
        relation = relation.where(relation.model.primary_key => options[:batch_ids]) # rubocop:disable CodeReuse/ActiveRecord
      end

      relation.find_in_batches(batch_size: BATCH_SIZE) do |batch| # rubocop: disable CodeReuse/ActiveRecord
        batch.each do |lfs_object|
          save_lfs_object(lfs_object)
          @exported_objects_count += 1
        end

        append_lfs_json_for_batch(batch)
      end

      write_lfs_json
    end

    private

    attr_reader :portable, :export_path, :lfs_json

    # Copies a locally stored object, or downloads it when it lives in object
    # storage. The destination file is named after the object's OID.
    def save_lfs_object(lfs_object)
      destination_filepath = File.join(export_path, lfs_object.oid)

      if lfs_object.local_store?
        # Skip DB rows whose backing file is gone from disk.
        return unless File.exist?(lfs_object.file.path)

        copy_files(lfs_object.file.path, destination_filepath)
      else
        download(lfs_object.file.url, destination_filepath)
      end
    end

    # rubocop: disable CodeReuse/ActiveRecord
    # Accumulates, per OID, the distinct repository types (project, wiki,
    # design, ...) referencing each object in this batch.
    def append_lfs_json_for_batch(lfs_objects_batch)
      lfs_objects_projects = LfsObjectsProject
        .select('lfs_objects.oid, array_agg(distinct lfs_objects_projects.repository_type) as repository_types')
        .joins(:lfs_object)
        .where(project: portable, lfs_object: lfs_objects_batch)
        .group('lfs_objects.oid')

      lfs_objects_projects.each do |group|
        oid = group.oid

        lfs_json[oid] ||= []
        lfs_json[oid] += group.repository_types
      end
    end
    # rubocop: enable CodeReuse/ActiveRecord

    # Writes the OID => repository types mapping alongside the object files.
    def write_lfs_json
      filepath = File.join(export_path, "#{BulkImports::FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION}.json")

      File.write(filepath, Gitlab::Json.dump(lfs_json))
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Verifies BulkImports::LfsObjectsExportService: exporting LFS object files,
# the oid => repository-types JSON, batching and object-storage handling.
RSpec.describe BulkImports::LfsObjectsExportService, feature_category: :importers do
  let_it_be(:project) { create(:project) }
  let_it_be(:lfs_json_filename) { "#{BulkImports::FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION}.json" }
  let_it_be(:remote_url) { 'http://my-object-storage.local' }

  let(:export_path) { Dir.mktmpdir }
  let(:lfs_object) { create(:lfs_object, :with_file) }

  subject(:service) { described_class.new(project, export_path) }

  before do
    stub_lfs_object_storage

    # Reference the same object from the wiki and design repositories so the
    # JSON contains multiple repository types for a single oid.
    %w[wiki design].each do |repository_type|
      create(
        :lfs_objects_project,
        project: project,
        repository_type: repository_type,
        lfs_object: lfs_object
      )
    end

    project.lfs_objects << lfs_object
  end

  after do
    FileUtils.remove_entry(export_path) if Dir.exist?(export_path)
  end

  describe '#execute' do
    it 'exports lfs objects and their repository types' do
      filepath = File.join(export_path, lfs_json_filename)

      service.execute

      expect(File).to exist(File.join(export_path, lfs_object.oid))
      expect(File).to exist(filepath)

      lfs_json = Gitlab::Json.parse(File.read(filepath))

      # nil comes from the plain project repository association.
      expect(lfs_json).to eq(
        {
          lfs_object.oid => [
            LfsObjectsProject.repository_types['wiki'],
            LfsObjectsProject.repository_types['design'],
            nil
          ]
        }
      )
    end

    context 'when export is batched' do
      it 'exports only specified lfs objects' do
        new_lfs_object = create(:lfs_object, :with_file)

        project.lfs_objects << new_lfs_object

        service.execute(batch_ids: [new_lfs_object.id])

        expect(File).to exist(File.join(export_path, new_lfs_object.oid))
        expect(File).not_to exist(File.join(export_path, lfs_object.oid))
      end
    end

    context 'when lfs object has file on disk missing' do
      it 'does not attempt to copy non-existent file' do
        FileUtils.rm(lfs_object.file.path)

        expect(service).not_to receive(:copy_files)

        service.execute

        expect(File).not_to exist(File.join(export_path, lfs_object.oid))
      end
    end

    context 'when lfs object is remotely stored' do
      let(:lfs_object) { create(:lfs_object, :object_storage) }

      it 'downloads lfs object from object storage' do
        expect_next_instance_of(LfsObjectUploader) do |instance|
          expect(instance).to receive(:url).and_return(remote_url)
        end

        expect(subject).to receive(:download).with(remote_url, File.join(export_path, lfs_object.oid))

        service.execute
      end
    end
  end

  describe '#exported_objects_count' do
    it 'return the number of exported lfs objects' do
      project.lfs_objects << create(:lfs_object, :with_file)

      service.execute

      expect(service.exported_objects_count).to eq(2)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

# File Download Service allows remote file download into tmp directory.
#
# @param configuration [BulkImports::Configuration] Config object containing url and access token
# @param relative_url [String] Relative URL to download the file from
# @param tmpdir [String] Temp directory to store downloaded file to. Must be located under `Dir.tmpdir`.
# @param file_size_limit [Integer] Maximum allowed file size. If 0, no limit will apply.
# @param allowed_content_types [Array<String>] Allowed file content types
# @param filename [String] Name of the file to download, if known. Use remote filename if none given.
module BulkImports
  class FileDownloadService
    include ::BulkImports::FileDownloads::FilenameFetch
    include ::BulkImports::FileDownloads::Validations

    ServiceError = Class.new(StandardError)

    DEFAULT_ALLOWED_CONTENT_TYPES = %w[application/gzip application/octet-stream].freeze
    # Number of characters of the last received chunk retained for error logs.
    LAST_CHUNK_CONTEXT_CHAR_LIMIT = 200

    def initialize(
      configuration:,
      relative_url:,
      tmpdir:,
      file_size_limit: default_file_size_limit,
      allowed_content_types: DEFAULT_ALLOWED_CONTENT_TYPES,
      filename: nil)
      @configuration = configuration
      @relative_url = relative_url
      @filename = filename
      @tmpdir = tmpdir
      @file_size_limit = file_size_limit
      @allowed_content_types = allowed_content_types
      # Content type/length are validated once, on the first non-redirect chunk.
      @remote_content_validated = false
    end

    # Validates destination and source, streams the remote file to disk and
    # returns the path of the downloaded file.
    #
    # @raise [ServiceError] on invalid content, oversize payloads, non-200
    #   responses, or when the destination is a symlink/shared hard link
    def execute
      validate_tmpdir
      validate_filepath
      validate_url

      download_file

      validate_symlink

      filepath
    end

    private

    attr_reader :configuration, :relative_url, :tmpdir, :file_size_limit, :allowed_content_types,
      :response_headers, :last_chunk_context, :response_code

    # Streams the response body chunk-by-chunk, validating as it goes.
    # Any failure removes the partially written file before re-raising.
    def download_file
      File.open(filepath, 'wb') do |file|
        bytes_downloaded = 0

        http_client.stream(relative_url) do |chunk|
          # Skip initial redirect responses; later redirects are errors below.
          next if bytes_downloaded == 0 && [301, 302, 303, 307, 308].include?(chunk.code)

          @response_code = chunk.code
          @response_headers ||= Gitlab::HTTP::Response::Headers.new(chunk.http_response.to_hash)
          @last_chunk_context = chunk.to_s.truncate(LAST_CHUNK_CONTEXT_CHAR_LIMIT)

          unless @remote_content_validated
            validate_content_type
            validate_content_length

            @remote_content_validated = true
          end

          bytes_downloaded += chunk.size

          validate_size!(bytes_downloaded)

          raise(ServiceError, "File download error #{chunk.code}") unless chunk.code == 200

          file.write(chunk)
        end
      end
    rescue StandardError => e
      FileUtils.rm_f(filepath)

      raise e
    end

    # Logs the failure with response context, then raises ServiceError.
    def raise_error(message)
      logger.warn(
        message: message,
        response_code: response_code,
        response_headers: response_headers,
        importer: 'gitlab_migration',
        last_chunk_context: last_chunk_context
      )

      raise ServiceError, message
    end

    def http_client
      @http_client ||= BulkImports::Clients::HTTP.new(
        url: configuration.url,
        token: configuration.access_token
      )
    end

    def allow_local_requests?
      ::Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
    end

    # Guards against writing outside the system temp directory.
    def validate_tmpdir
      Gitlab::PathTraversal.check_allowed_absolute_path!(tmpdir, [Dir.tmpdir])
    end

    def filepath
      @filepath ||= File.join(@tmpdir, filename)
    end

    # An explicit filename wins; otherwise derived from content-disposition.
    def filename
      @filename.presence || remote_filename
    end

    def logger
      @logger ||= Logger.build
    end

    def validate_url
      ::Gitlab::UrlBlocker.validate!(
        http_client.resource_url(relative_url),
        allow_localhost: allow_local_requests?,
        allow_local_network: allow_local_requests?,
        schemes: %w[http https]
      )
    end

    def default_file_size_limit
      Gitlab::CurrentSettings.current_application_settings.bulk_import_max_download_file_size.megabytes
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::FileDownloadService, feature_category: :importers do
describe '#execute' do
let_it_be(:allowed_content_types) { %w[application/gzip application/octet-stream] }
let_it_be(:file_size_limit) { 5.gigabytes }
let_it_be(:config) { build(:bulk_import_configuration) }
let_it_be(:content_type) { 'application/octet-stream' }
let_it_be(:content_disposition) { nil }
let_it_be(:filename) { 'file_download_service_spec' }
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:filepath) { File.join(tmpdir, filename) }
let_it_be(:content_length) { 1000 }
let(:headers) do
{
'content-length' => content_length,
'content-type' => content_type,
'content-disposition' => content_disposition
}
end
let(:chunk_code) { 200 }
let(:chunk_double) do
double('chunk', size: 100, code: chunk_code, http_response: double(to_hash: headers), to_s: 'some chunk context')
end
subject(:service) do
described_class.new(
configuration: config,
relative_url: '/test',
tmpdir: tmpdir,
filename: filename,
file_size_limit: file_size_limit,
allowed_content_types: allowed_content_types
)
end
before do
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
allow(client).to receive(:stream).and_yield(chunk_double)
end
allow(service).to receive(:response_headers).and_return(headers)
end
shared_examples 'downloads file' do
it 'downloads file' do
subject.execute
expect(File.exist?(filepath)).to eq(true)
expect(File.read(filepath)).to include('chunk')
end
end
include_examples 'downloads file'
context 'when content-type is application/gzip' do
let_it_be(:content_type) { 'application/gzip' }
include_examples 'downloads file'
end
context 'when url is not valid' do
it 'raises an error' do
stub_application_setting(allow_local_requests_from_web_hooks_and_services: false)
double = instance_double(BulkImports::Configuration, url: 'https://localhost', access_token: 'token')
service = described_class.new(
configuration: double,
relative_url: '/test',
tmpdir: tmpdir,
filename: filename,
file_size_limit: file_size_limit,
allowed_content_types: allowed_content_types
)
expect { service.execute }.to raise_error(Gitlab::HTTP_V2::UrlBlocker::BlockedUrlError)
end
end
context 'when content-type is not valid' do
let(:content_type) { 'invalid' }
let(:import_logger) { instance_double(BulkImports::Logger) }
before do
allow(BulkImports::Logger).to receive(:build).and_return(import_logger)
allow(import_logger).to receive(:warn)
end
it 'logs and raises an error' do
expect(import_logger).to receive(:warn).once.with(
message: 'Invalid content type',
response_code: chunk_code,
response_headers: headers,
importer: 'gitlab_migration',
last_chunk_context: 'some chunk context'
)
expect { subject.execute }.to raise_error(described_class::ServiceError, 'Invalid content type')
end
end
context 'when content-length is not valid' do
context 'when content-length exceeds limit' do
let(:file_size_limit) { 1 }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'File size 1000 B exceeds limit of 1 B'
)
end
end
context 'when content-length is missing' do
let(:content_length) { nil }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'Missing content-length header'
)
end
end
end
context 'when content-length is equals the file size limit' do
let(:content_length) { 150 }
let(:file_size_limit) { 150 }
it 'does not raise an error' do
expect { subject.execute }.not_to raise_error
end
end
context 'when partially downloaded file exceeds limit' do
let(:content_length) { 151 }
let(:file_size_limit) { 150 }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'File size 151 B exceeds limit of 150 B'
)
end
end
context 'when chunk code is not 200' do
let(:chunk_code) { 500 }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'File download error 500'
)
end
context 'when chunk code is redirection' do
let(:chunk_code) { 303 }
it 'does not write a redirection chunk' do
expect { subject.execute }.not_to raise_error
expect(File.read(filepath)).not_to include('redirection')
end
context 'when redirection chunk appears at a later stage of the download' do
it 'raises an error' do
another_chunk_double = double('another redirection', size: 1000, code: 303)
data_chunk = double('data chunk', size: 1000, code: 200, http_response: double(to_hash: {}))
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
allow(client)
.to receive(:stream)
.and_yield(chunk_double)
.and_yield(data_chunk)
.and_yield(another_chunk_double)
end
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'File download error 303'
)
end
end
end
end
describe 'remote content validation' do
context 'on redirect chunk' do
let(:chunk_code) { 303 }
it 'does not run content type & length validations' do
expect(service).not_to receive(:validate_content_type)
expect(service).not_to receive(:validate_content_length)
service.execute
end
end
context 'when there is one data chunk' do
it 'validates content type & length' do
expect(service).to receive(:validate_content_type)
expect(service).to receive(:validate_content_length)
service.execute
end
end
context 'when there are multiple data chunks' do
it 'validates content type & length only once' do
data_chunk = double(
'data chunk',
size: 1000,
code: 200,
http_response: double(to_hash: {})
)
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
allow(client)
.to receive(:stream)
.and_yield(chunk_double)
.and_yield(data_chunk)
end
expect(service).to receive(:validate_content_type).once
expect(service).to receive(:validate_content_length).once
service.execute
end
end
end
context 'when file is a symlink' do
let_it_be(:symlink) { File.join(tmpdir, 'symlink') }
before do
FileUtils.ln_s(File.join(tmpdir, filename), symlink, force: true)
end
subject do
described_class.new(
configuration: config,
relative_url: '/test',
tmpdir: tmpdir,
filename: 'symlink',
file_size_limit: file_size_limit,
allowed_content_types: allowed_content_types
)
end
it 'raises an error and removes the file' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'Invalid downloaded file'
)
expect(File.exist?(symlink)).to eq(false)
end
end
context 'when file shares multiple hard links' do
let_it_be(:hard_link) { File.join(tmpdir, 'hard_link') }
before do
existing_file = File.join(Dir.mktmpdir, filename)
FileUtils.touch(existing_file)
FileUtils.link(existing_file, hard_link)
end
subject do
described_class.new(
configuration: config,
relative_url: '/test',
tmpdir: tmpdir,
filename: 'hard_link',
file_size_limit: file_size_limit,
allowed_content_types: allowed_content_types
)
end
it 'raises an error and removes the file' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
'Invalid downloaded file'
)
expect(File.exist?(hard_link)).to eq(false)
end
end
context 'when dir is not in tmpdir' do
subject do
described_class.new(
configuration: config,
relative_url: '/test',
tmpdir: '/etc',
filename: filename,
file_size_limit: file_size_limit,
allowed_content_types: allowed_content_types
)
end
it 'raises an error' do
expect { subject.execute }.to raise_error(
StandardError,
'path /etc is not allowed'
)
end
end
context 'when dir path is being traversed' do
subject do
described_class.new(
configuration: config,
relative_url: '/test',
tmpdir: File.join(Dir.mktmpdir('bulk_imports'), 'test', '..'),
filename: filename,
file_size_limit: file_size_limit,
allowed_content_types: allowed_content_types
)
end
it 'raises an error' do
expect { subject.execute }.to raise_error(
Gitlab::PathTraversal::PathTraversalAttackError,
'Invalid path'
)
end
end
# Behaviour when no explicit :filename is passed to the service and the name
# must be derived from the content-disposition response header.
context 'when using the remote filename' do
  let_it_be(:filename) { nil }

  context 'when no filename is given' do
    it 'raises an error when the filename is not provided in the request header' do
      expect { subject.execute }.to raise_error(
        described_class::ServiceError,
        'Remote filename not provided in content-disposition header'
      )
    end
  end

  context 'with a given filename' do
    let_it_be(:content_disposition) { 'filename="avatar.png"' }

    it 'uses the given filename' do
      expect(subject.execute).to eq(File.join(tmpdir, "avatar.png"))
    end
  end

  context 'when the filename is a path' do
    let_it_be(:content_disposition) { 'filename="../../avatar.png"' }

    # Path components must be stripped so the file cannot escape tmpdir.
    it 'sanitizes the filename down to its basename' do
      expect(subject.execute).to eq(File.join(tmpdir, "avatar.png"))
    end
  end

  context 'when the filename is longer than the limit' do
    let_it_be(:content_disposition) { 'filename="../../xxx.b"' }

    before do
      stub_const('BulkImports::FileDownloads::FilenameFetch::FILENAME_SIZE_LIMIT', 1)
    end

    # The base name is truncated to the limit while the extension is kept.
    it 'truncates the filename to the size limit' do
      expect(subject.execute).to eq(File.join(tmpdir, "x.b"))
    end
  end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module BulkImports
  # Splits a portable relation export into fixed-size batches and schedules
  # one RelationBatchExportWorker per batch, plus a finisher worker.
  class BatchedRelationExportService
    include Gitlab::Utils::StrongMemoize

    BATCH_SIZE = 1000
    BATCH_CACHE_KEY = 'bulk_imports/batched_relation_export/%{export_id}/%{batch_id}'
    # How long batch record ids stay cached for the batch export workers.
    CACHE_DURATION = 4.hours

    # Cache key under which a batch's record ids are stored.
    def self.cache_key(export_id, batch_id)
      Kernel.format(BATCH_CACHE_KEY, export_id: export_id, batch_id: batch_id)
    end

    def initialize(user, portable, relation, jid)
      @user = user
      @portable = portable
      @relation = relation
      @resolved_relation = portable.public_send(relation) # rubocop:disable GitlabSecurity/PublicSend
      @jid = jid
    end

    def execute
      # Nothing to export: mark the export finished right away.
      return finish_export! if batches_count == 0

      start_export!
      # Re-runs must not leave stale batch rows behind.
      export.batches.destroy_all # rubocop: disable Cop/DestroyAll
      enqueue_batch_exports

      FinishBatchedRelationExportWorker.perform_async(export.id)
    end

    private

    attr_reader :user, :portable, :relation, :jid, :config, :resolved_relation

    def export
      @export ||= portable.bulk_import_exports.find_or_create_by!(relation: relation) # rubocop:disable CodeReuse/ActiveRecord
    end

    def objects_count
      resolved_relation.count
    end

    def batches_count
      objects_count.fdiv(BATCH_SIZE).ceil
    end

    def start_export!
      update_export!('start')
    end

    def finish_export!
      update_export!('finish')
    end

    def update_export!(event)
      export.update!(
        status_event: event,
        total_objects_count: objects_count,
        batched: true,
        batches_count: batches_count,
        jid: jid,
        error: nil
      )
    end

    # Creates one batch record per BATCH_SIZE slice, caches the slice's
    # record ids, and enqueues a worker to export that batch.
    def enqueue_batch_exports
      resolved_relation.each_batch(of: BATCH_SIZE) do |batch, batch_number|
        batch_id = find_or_create_batch(batch_number).id
        ids = batch.pluck(batch.model.primary_key) # rubocop:disable CodeReuse/ActiveRecord

        Gitlab::Cache::Import::Caching.set_add(self.class.cache_key(export.id, batch_id), ids, timeout: CACHE_DURATION)

        RelationBatchExportWorker.perform_async(user.id, batch_id)
      end
    end

    def find_or_create_batch(batch_number)
      export.batches.find_or_create_by!(batch_number: batch_number) # rubocop:disable CodeReuse/ActiveRecord
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Verifies BulkImports::BatchedRelationExportService: batch creation, worker
# scheduling, and the started/finished export state transitions.
RSpec.describe BulkImports::BatchedRelationExportService, feature_category: :importers do
  let_it_be(:user) { create(:user) }
  let_it_be(:portable) { create(:group) }

  let(:relation) { 'labels' }
  let(:jid) { '123' }

  subject(:service) { described_class.new(user, portable, relation, jid) }

  describe '#execute' do
    context 'when there are batches to export' do
      let_it_be(:label) { create(:group_label, group: portable) }

      it 'marks export as started' do
        service.execute

        export = portable.bulk_import_exports.first

        expect(export.reload.started?).to eq(true)
      end

      # Re-running an export must not leave stale batch rows behind.
      it 'removes existing batches' do
        expect_next_instance_of(BulkImports::Export) do |export|
          expect(export.batches).to receive(:destroy_all)
        end

        service.execute
      end

      it 'enqueues export jobs for each batch & caches batch record ids' do
        expect(BulkImports::RelationBatchExportWorker).to receive(:perform_async)
        expect(Gitlab::Cache::Import::Caching).to receive(:set_add)

        service.execute
      end

      it 'enqueues FinishBatchedRelationExportWorker' do
        expect(BulkImports::FinishBatchedRelationExportWorker).to receive(:perform_async)

        service.execute
      end

      context 'when there are multiple batches' do
        it 'creates a batch record for each batch of records' do
          # With BATCH_SIZE of 1, 11 labels produce 11 batches.
          stub_const("#{described_class.name}::BATCH_SIZE", 1)

          create_list(:group_label, 10, group: portable)

          service.execute

          export = portable.bulk_import_exports.first

          expect(export.batches.count).to eq(11)
        end
      end

      context 'when an error occurs during batches creation' do
        it 'does not enqueue FinishBatchedRelationExportWorker' do
          allow(service).to receive(:enqueue_batch_exports).and_raise(StandardError)

          expect(BulkImports::FinishBatchedRelationExportWorker).not_to receive(:perform_async)

          expect { service.execute }.to raise_error(StandardError)
        end
      end
    end

    context 'when there are no batches to export' do
      let(:relation) { 'milestones' }

      it 'marks export as finished' do
        service.execute

        export = portable.bulk_import_exports.first

        expect(export.finished?).to eq(true)
        expect(export.batches.count).to eq(0)
      end
    end
  end

  describe '.cache_key' do
    it 'returns cache key given export and batch ids' do
      expect(described_class.cache_key(1, 1)).to eq('bulk_imports/batched_relation_export/1/1')
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true

module BulkImports
  # Writes a Git repository to disk as a single bundle file for bulk export.
  class RepositoryBundleExportService
    # @param repository [Repository] repository to bundle (project, wiki, design, ...)
    # @param export_path [String] directory the bundle is written into
    # @param export_filename [String] bundle basename (".bundle" is appended)
    def initialize(repository, export_path, export_filename)
      @repository = repository
      @export_path = export_path
      @export_filename = export_filename
    end

    # Bundles the repository to "<export_path>/<export_filename>.bundle".
    # No-op (returns nil) when the repository is absent or has no content.
    def execute(_options = {})
      repository.bundle_to_disk(bundle_filepath) if repository_exists?
    end

    private

    attr_reader :repository, :export_path, :export_filename

    # A repository must both exist and contain content to be bundled.
    def repository_exists?
      repository.exists? && !repository.empty?
    end

    def bundle_filepath
      File.join(export_path, "#{export_filename}.bundle")
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Verifies BulkImports::RepositoryBundleExportService for both the project
# repository and the design repository via a shared example group.
RSpec.describe BulkImports::RepositoryBundleExportService, feature_category: :importers do
  let(:project) { create(:project) }
  let(:export_path) { Dir.mktmpdir }

  subject(:service) { described_class.new(repository, export_path, export_filename) }

  after do
    FileUtils.remove_entry(export_path) if Dir.exist?(export_path)
  end

  describe '#execute' do
    shared_examples 'repository export' do
      context 'when repository exists' do
        it 'bundles repository to disk' do
          allow(repository).to receive(:exists?).and_return(true)
          allow(repository).to receive(:empty?).and_return(false)
          expect(repository).to receive(:bundle_to_disk).with(File.join(export_path, "#{export_filename}.bundle"))

          service.execute
        end
      end

      context 'when repository does not exist' do
        it 'does not bundle repository to disk' do
          allow(repository).to receive(:exists?).and_return(false)

          expect(repository).not_to receive(:bundle_to_disk)

          service.execute
        end
      end

      context 'when repository is empty' do
        it 'does not bundle repository to disk' do
          allow(repository).to receive(:empty?).and_return(true)

          expect(repository).not_to receive(:bundle_to_disk)

          service.execute
        end
      end
    end

    include_examples 'repository export' do
      let(:repository) { project.repository }
      let(:export_filename) { 'repository' }
    end

    include_examples 'repository export' do
      let(:repository) { project.design_repository }
      let(:export_filename) { 'design' }
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module BulkImports
  class ExportService
    # @param portable [Project|Group] A project or a group to export.
    # @param user [User] A user performing the export.
    # @param batched [Boolean] Whether to export the data in batches.
    def initialize(portable:, user:, batched: false)
      @portable = portable
      @current_user = user
      @batched = batched
    end

    # Enqueues one RelationExportWorker per portable relation and reports the
    # outcome as a ServiceResponse. Any error becomes an error response
    # carrying the exception class as its message.
    def execute
      validate_user_permissions!

      portable_relations.each { |relation| enqueue_relation_export(relation) }

      ServiceResponse.success
    rescue StandardError => e
      ServiceResponse.error(
        message: e.class,
        http_status: :unprocessable_entity
      )
    end

    private

    attr_reader :portable, :current_user, :batched

    # Top-level relations exportable for this portable record.
    def portable_relations
      FileTransfer.config_for(portable).portable_relations
    end

    # Schedules the asynchronous export of a single relation.
    def enqueue_relation_export(relation)
      RelationExportWorker.perform_async(current_user.id, portable.id, portable.class.name, relation, batched)
    end

    # Raises a permission error unless the user may administer the portable.
    def validate_user_permissions!
      ability = "admin_#{portable.to_ability_name}"

      return if current_user.can?(ability, portable)

      raise(::Gitlab::ImportExport::Error.permission_error(current_user, portable))
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

RSpec.describe BulkImports::ExportService, feature_category: :importers do
  let_it_be(:group) { create(:group) }
  let_it_be(:user) { create(:user) }

  before do
    group.add_owner(user)
  end

  subject { described_class.new(portable: group, user: user) }

  describe '#execute' do
    let_it_be(:top_level_relations) { BulkImports::FileTransfer.config_for(group).portable_relations }

    before do
      allow(subject).to receive(:execute).and_return(ServiceResponse.success).and_call_original
    end

    context 'when export is not batched' do
      it 'schedules RelationExportWorker for each top level relation' do
        top_level_relations.each do |relation|
          expect(BulkImports::RelationExportWorker)
            .to receive(:perform_async)
            .with(user.id, group.id, group.class.name, relation, false)
        end

        subject.execute
      end
    end

    context 'when export is batched' do
      subject { described_class.new(portable: group, user: user, batched: true) }

      it 'schedules RelationExportWorker with a `batched: true` flag' do
        top_level_relations.each do |relation|
          expect(BulkImports::RelationExportWorker)
            .to receive(:perform_async)
            .with(user.id, group.id, group.class.name, relation, true)
        end

        subject.execute
      end
    end

    context 'when exception occurs' do
      it 'does not schedule RelationExportWorker' do
        service = described_class.new(portable: nil, user: user)

        # Fixed typo: the service returns :unprocessable_entity,
        # not :unprocessible_entity.
        expect(service)
          .to receive(:execute)
          .and_return(ServiceResponse.error(message: 'Gitlab::ImportExport::Error', http_status: :unprocessable_entity))
          .and_call_original

        expect(BulkImports::RelationExportWorker).not_to receive(:perform_async)

        service.execute
      end

      context 'when user is not allowed to perform export' do
        let(:another_user) { create(:user) }

        it 'does not schedule RelationExportWorker' do
          # `another_user` comes from the let above; the previous duplicate
          # local assignment shadowed it and created an unused extra record.
          service = described_class.new(portable: group, user: another_user)
          response = service.execute

          expect(response.status).to eq(:error)
          expect(response.message).to eq(Gitlab::ImportExport::Error)
          expect(response.http_status).to eq(:unprocessable_entity)
        end
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module BulkImports
# Drives one tick of a bulk import: finishes/fails the import when all
# entities are done, otherwise starts the next batch of created entities
# and re-enqueues itself.
class ProcessService
PERFORM_DELAY = 5.seconds
DEFAULT_BATCH_SIZE = 5
attr_reader :bulk_import
def initialize(bulk_import)
@bulk_import = bulk_import
end
# Advances the bulk import state machine. Safe to call repeatedly; each
# guard below handles one terminal or throttling condition.
def execute
return unless bulk_import
return if bulk_import.completed?
return bulk_import.fail_op! if all_entities_failed?
return bulk_import.finish! if all_entities_processed? && bulk_import.started?
return re_enqueue if max_batch_size_exceeded? # Do not start more jobs if max allowed are already running
process_bulk_import
re_enqueue
end
private
# Starts the import (if freshly created) and kicks off up to
# `next_batch_size` created entities: trackers are created first, then the
# entity is started and an export is requested from the source instance.
def process_bulk_import
bulk_import.start! if bulk_import.created?
created_entities.first(next_batch_size).each do |entity|
create_tracker(entity)
entity.start!
Gitlab::ApplicationContext.with_context(bulk_import_entity_id: entity.id) do
BulkImports::ExportRequestWorker.perform_async(entity.id)
end
end
end
def entities
@entities ||= bulk_import.entities
end
def created_entities
entities.with_status(:created)
end
# True when every entity reached a terminal state (finished or failed).
def all_entities_processed?
entities.all? { |entity| entity.finished? || entity.failed? }
end
def all_entities_failed?
entities.all?(&:failed?)
end
# A new BulkImportWorker job is enqueued to either
# - Process the new BulkImports::Entity created during import (e.g. for the subgroups)
# - Or to mark the `bulk_import` as finished
def re_enqueue
BulkImportWorker.perform_in(PERFORM_DELAY, bulk_import.id)
end
def started_entities
entities.with_status(:started)
end
def max_batch_size_exceeded?
started_entities.count >= DEFAULT_BATCH_SIZE
end
# Remaining capacity in the current batch (never negative).
def next_batch_size
[DEFAULT_BATCH_SIZE - started_entities.count, 0].max
end
# Creates one tracker row per pipeline of the entity, atomically. Pipelines
# incompatible with the source version are recorded as :skipped.
def create_tracker(entity)
entity.class.transaction do
entity.pipelines.each do |pipeline|
status = skip_pipeline?(pipeline, entity) ? :skipped : :created
entity.trackers.create!(
stage: pipeline[:stage],
pipeline_name: pipeline[:pipeline],
status: BulkImports::Tracker.state_machine.states[status].value
)
end
end
end
# A pipeline is skipped when the source version (ignoring the patch level)
# falls outside the pipeline's declared min/max source version range.
def skip_pipeline?(pipeline, entity)
return false unless entity.source_version.valid?
minimum_version, maximum_version = pipeline.values_at(:minimum_source_version, :maximum_source_version)
if source_version_out_of_range?(minimum_version, maximum_version, entity.source_version.without_patch)
log_skipped_pipeline(pipeline, entity, minimum_version, maximum_version)
return true
end
false
end
def source_version_out_of_range?(minimum_version, maximum_version, non_patch_source_version)
(minimum_version && non_patch_source_version < Gitlab::VersionInfo.parse(minimum_version)) ||
(maximum_version && non_patch_source_version > Gitlab::VersionInfo.parse(maximum_version))
end
def log_skipped_pipeline(pipeline, entity, minimum_version, maximum_version)
logger.info(
message: 'Pipeline skipped as source instance version not compatible with pipeline',
bulk_import_entity_id: entity.id,
bulk_import_id: entity.bulk_import_id,
bulk_import_entity_type: entity.source_type,
source_full_path: entity.source_full_path,
pipeline_class: pipeline[:pipeline],
minimum_source_version: minimum_version,
maximum_source_version: maximum_version,
source_version: entity.source_version.to_s
)
end
def logger
@logger ||= Logger.build
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::ProcessService, feature_category: :importers do
describe '#execute' do
let_it_be_with_reload(:bulk_import) { create(:bulk_import) }
subject { described_class.new(bulk_import) }
# Terminal/guard states: the service must be a no-op for each of them.
context 'when no bulk import is found' do
let(:bulk_import) { nil }
it 'does nothing' do
expect(described_class).not_to receive(:process_bulk_import)
subject.execute
end
end
context 'when bulk import is finished' do
it 'does nothing' do
bulk_import.update!(status: 2)
expect(described_class).not_to receive(:process_bulk_import)
subject.execute
end
end
context 'when bulk import is failed' do
it 'does nothing' do
bulk_import.update!(status: -1)
expect(described_class).not_to receive(:process_bulk_import)
subject.execute
end
end
context 'when bulk import has timed out' do
it 'does nothing' do
bulk_import.update!(status: 3)
expect(described_class).not_to receive(:process_bulk_import)
subject.execute
end
end
context 'when all entities are processed' do
it 'marks bulk import as finished' do
bulk_import.update!(status: 1)
create(:bulk_import_entity, :finished, bulk_import: bulk_import)
create(:bulk_import_entity, :failed, bulk_import: bulk_import)
subject.execute
expect(bulk_import.reload.finished?).to eq(true)
end
end
context 'when all entities are failed' do
it 'marks bulk import as failed' do
bulk_import.update!(status: 1)
create(:bulk_import_entity, :failed, bulk_import: bulk_import)
create(:bulk_import_entity, :failed, bulk_import: bulk_import)
subject.execute
expect(bulk_import.reload.failed?).to eq(true)
end
end
# Throttling: with DEFAULT_BATCH_SIZE entities already started, no new
# exports are requested — the job only re-enqueues itself.
context 'when maximum allowed number of import entities in progress' do
it 're-enqueues itself' do
bulk_import.update!(status: 1)
create(:bulk_import_entity, :created, bulk_import: bulk_import)
(described_class::DEFAULT_BATCH_SIZE + 1).times do
create(:bulk_import_entity, :started, bulk_import: bulk_import)
end
expect(BulkImportWorker).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
expect(BulkImports::ExportRequestWorker).not_to receive(:perform_async)
subject.execute
end
end
context 'when bulk import is created' do
it 'marks bulk import as started' do
create(:bulk_import_entity, :created, bulk_import: bulk_import)
subject.execute
expect(bulk_import.reload.started?).to eq(true)
end
it 'creates all the required pipeline trackers' do
entity_1 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
entity_2 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
expect { subject.execute }
.to change { BulkImports::Tracker.count }
.by(BulkImports::Groups::Stage.new(entity_1).pipelines.size * 2)
expect(entity_1.trackers).not_to be_empty
expect(entity_2.trackers).not_to be_empty
end
context 'when there are created entities to process' do
before do
stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
end
it 'marks a batch of entities as started, enqueues EntityWorker, ExportRequestWorker and reenqueues' do
create(:bulk_import_entity, :created, bulk_import: bulk_import)
create(:bulk_import_entity, :created, bulk_import: bulk_import)
expect(BulkImportWorker).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
subject.execute
bulk_import.reload
expect(bulk_import.entities.map(&:status_name)).to contain_exactly(:created, :started)
end
context 'when there are project entities to process' do
it 'enqueues ExportRequestWorker' do
create(:bulk_import_entity, :created, :project_entity, bulk_import: bulk_import)
expect(BulkImports::ExportRequestWorker).to receive(:perform_async).once
subject.execute
end
end
end
end
context 'when importing a group' do
it 'creates trackers for group entity' do
entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
subject.execute
expect(entity.trackers.to_a).to include(
have_attributes(
stage: 0, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupPipeline.to_s
),
have_attributes(
stage: 1, status_name: :created, relation: BulkImports::Groups::Pipelines::GroupAttributesPipeline.to_s
)
)
end
end
context 'when importing a project' do
it 'creates trackers for project entity' do
entity = create(:bulk_import_entity, :project_entity, bulk_import: bulk_import)
subject.execute
expect(entity.trackers.to_a).to include(
have_attributes(
stage: 0, status_name: :created, relation: BulkImports::Projects::Pipelines::ProjectPipeline.to_s
),
have_attributes(
stage: 1, status_name: :created, relation: BulkImports::Projects::Pipelines::RepositoryPipeline.to_s
)
)
end
end
# Version gating: pipelines with minimum_source_version newer than the
# source instance must be created as :skipped and logged.
context 'when tracker configuration has a minimum version defined' do
before do
allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
allow(stage).to receive(:config).and_return(
{
pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
pipeline2: { pipeline: 'PipelineClass2', stage: 1, minimum_source_version: '14.10.0' },
pipeline3: { pipeline: 'PipelineClass3', stage: 1, minimum_source_version: '15.0.0' },
pipeline5: { pipeline: 'PipelineClass4', stage: 1, minimum_source_version: '15.1.0' },
pipeline6: { pipeline: 'PipelineClass5', stage: 1, minimum_source_version: '16.0.0' }
}
)
end
end
# NOTE(review): "mininum" below is a typo for "minimum" in the context
# description string.
context 'when the source instance version is older than the tracker mininum version' do
let_it_be(:entity) { create(:bulk_import_entity, :group_entity, bulk_import: bulk_import) }
before do
bulk_import.update!(source_version: '15.0.0')
end
it 'creates trackers as skipped if version requirement does not meet' do
subject.execute
expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
[:created, 'PipelineClass1'],
[:created, 'PipelineClass2'],
[:created, 'PipelineClass3'],
[:skipped, 'PipelineClass4'],
[:skipped, 'PipelineClass5']
)
end
it 'logs an info message for the skipped pipelines' do
expect_next_instance_of(BulkImports::Logger) do |logger|
expect(logger).to receive(:info).with(
message: 'Pipeline skipped as source instance version not compatible with pipeline',
bulk_import_entity_id: entity.id,
bulk_import_id: entity.bulk_import_id,
bulk_import_entity_type: entity.source_type,
source_full_path: entity.source_full_path,
pipeline_class: 'PipelineClass4',
minimum_source_version: '15.1.0',
maximum_source_version: nil,
source_version: '15.0.0'
)
expect(logger).to receive(:info).with(
message: 'Pipeline skipped as source instance version not compatible with pipeline',
bulk_import_entity_id: entity.id,
bulk_import_id: entity.bulk_import_id,
bulk_import_entity_type: entity.source_type,
source_full_path: entity.source_full_path,
pipeline_class: 'PipelineClass5',
minimum_source_version: '16.0.0',
maximum_source_version: nil,
source_version: '15.0.0'
)
end
subject.execute
end
end
context 'when the source instance version is undefined' do
it 'creates trackers as created' do
bulk_import.update!(source_version: nil)
entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
subject.execute
expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
[:created, 'PipelineClass1'],
[:created, 'PipelineClass2'],
[:created, 'PipelineClass3'],
[:created, 'PipelineClass4'],
[:created, 'PipelineClass5']
)
end
end
end
context 'when tracker configuration has a maximum version defined' do
before do
allow_next_instance_of(BulkImports::Groups::Stage) do |stage|
allow(stage).to receive(:config).and_return(
{
pipeline1: { pipeline: 'PipelineClass1', stage: 0 },
pipeline2: { pipeline: 'PipelineClass2', stage: 1, maximum_source_version: '14.10.0' },
pipeline3: { pipeline: 'PipelineClass3', stage: 1, maximum_source_version: '15.0.0' },
pipeline5: { pipeline: 'PipelineClass4', stage: 1, maximum_source_version: '15.1.0' },
pipeline6: { pipeline: 'PipelineClass5', stage: 1, maximum_source_version: '16.0.0' }
}
)
end
end
context 'when the source instance version is older than the tracker maximum version' do
it 'creates trackers as skipped if version requirement does not meet' do
bulk_import.update!(source_version: '15.0.0')
entity = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import)
subject.execute
expect(entity.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }).to contain_exactly(
[:created, 'PipelineClass1'],
[:skipped, 'PipelineClass2'],
[:created, 'PipelineClass3'],
[:created, 'PipelineClass4'],
[:created, 'PipelineClass5']
)
end
end
# The patch level must not affect skipping: x.y.z behaves like x.y.0.
context 'when the source instance version is a patch version' do
it 'creates trackers with the same status as the non-patch source version' do
bulk_import_1 = create(:bulk_import, source_version: '15.0.1')
entity_1 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_1)
bulk_import_2 = create(:bulk_import, source_version: '15.0.0')
entity_2 = create(:bulk_import_entity, :group_entity, bulk_import: bulk_import_2)
described_class.new(bulk_import_1).execute
described_class.new(bulk_import_2).execute
trackers_1 = entity_1.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
trackers_2 = entity_2.trackers.collect { |tracker| [tracker.status_name, tracker.relation] }
expect(trackers_1).to eq(trackers_2)
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module BulkImports
# Exports a single batch of a relation: reads the batch's record ids from
# the import cache, exports them, compresses and uploads the result, and
# records progress on the batch row.
class RelationBatchExportService
include Gitlab::ImportExport::CommandLineUtil
def initialize(user, batch)
@user = user
@batch = batch
@config = FileTransfer.config_for(portable)
end
# Runs the full batch export. The export directory is always removed,
# even when the export raises.
def execute
start_batch!
export_service.export_batch(relation_batch_ids)
ensure_export_file_exists!
compress_exported_relation
upload_compressed_file
finish_batch!
ensure
FileUtils.remove_entry(export_path)
end
private
attr_reader :user, :batch, :config
delegate :export_path, to: :config
delegate :batch_number, :export, to: :batch
delegate :portable, :relation, to: :export
delegate :exported_filename, :exported_objects_count, to: :export_service
def export_service
@export_service ||= config.export_service_for(relation).new(portable, export_path, relation, user)
end
def compress_exported_relation
gzip(dir: export_path, filename: exported_filename)
end
# Attaches the gzipped file to the (new or existing) upload record.
def upload_compressed_file
File.open(compressed_filename) { |file| batch_upload.export_file = file }
batch_upload.save!
end
def batch_upload
@batch_upload ||= ::BulkImports::ExportUpload.find_or_initialize_by(export_id: export.id, batch_id: batch.id) # rubocop: disable CodeReuse/ActiveRecord
end
def compressed_filename
File.join(export_path, "#{exported_filename}.gz")
end
# Record ids for this batch, previously stored in the cache by
# BatchedRelationExportService.
def relation_batch_ids
Gitlab::Cache::Import::Caching.values_from_set(cache_key).map(&:to_i)
end
def cache_key
BulkImports::BatchedRelationExportService.cache_key(export.id, batch.id)
end
def start_batch!
batch.update!(status_event: 'start', objects_count: 0, error: nil)
end
def finish_batch!
batch.update!(status_event: 'finish', objects_count: exported_objects_count, error: nil)
end
def exported_filepath
File.join(export_path, exported_filename)
end
# Create empty file on disk
# if relation is empty and nothing was exported
def ensure_export_file_exists!
FileUtils.touch(exported_filepath)
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::RelationBatchExportService, feature_category: :importers do
let_it_be(:project) { create(:project) }
let_it_be(:label) { create(:label, project: project) }
let_it_be(:user) { create(:user) }
let_it_be(:export) { create(:bulk_import_export, :batched, project: project) }
let_it_be(:batch) { create(:bulk_import_export_batch, export: export) }
let_it_be(:cache_key) { BulkImports::BatchedRelationExportService.cache_key(export.id, batch.id) }
subject(:service) { described_class.new(user, batch) }
# Seed the cache with the id the service is expected to export.
before_all do
Gitlab::Cache::Import::Caching.set_add(cache_key, label.id)
end
after(:all) do
Gitlab::Cache::Import::Caching.expire(cache_key, 0)
end
describe '#execute' do
it 'exports relation batch' do
expect(Gitlab::Cache::Import::Caching).to receive(:values_from_set).with(cache_key).and_call_original
service.execute
batch.reload
expect(batch.finished?).to eq(true)
expect(batch.objects_count).to eq(1)
expect(batch.error).to be_nil
expect(export.upload.export_file).to be_present
end
# The ensure-block cleanup must always remove the export directory.
it 'removes exported contents after export' do
allow(subject).to receive(:export_path).and_return('foo')
allow(FileUtils).to receive(:remove_entry)
expect(FileUtils).to receive(:remove_entry).with('foo')
service.execute
end
context 'when relation is empty and there is nothing to export' do
let_it_be(:export) { create(:bulk_import_export, :batched, project: project, relation: 'milestones') }
let_it_be(:batch) { create(:bulk_import_export_batch, export: export) }
it 'creates empty file on disk' do
allow(subject).to receive(:export_path).and_return('foo')
allow(FileUtils).to receive(:remove_entry)
expect(FileUtils).to receive(:touch).with('foo/milestones.ndjson').and_call_original
subject.execute
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Archive Extraction Service allows extraction of contents
# from `tar` archives with an additional handling (removal)
# of file symlinks.
#
# @param tmpdir [String] A path where archive is located
# and where its contents are extracted.
# Tmpdir directory must be located under `Dir.tmpdir`.
# `BulkImports::Error` is raised if any other directory path is used.
#
# @param filename [String] Name of the file to extract contents from.
#
# @example
# dir = Dir.mktmpdir
# filename = 'things.tar'
# BulkImports::ArchiveExtractionService.new(tmpdir: dir, filename: filename).execute
# Dir.glob(File.join(dir, '**', '*'))
# => ['/path/to/tmp/dir/extracted_file_1', '/path/to/tmp/dir/extracted_file_2', '/path/to/tmp/dir/extracted_file_3']
module BulkImports
  class ArchiveExtractionService
    include Gitlab::ImportExport::CommandLineUtil

    # @param tmpdir [String] directory (under Dir.tmpdir) containing the
    #   archive; extraction happens in place.
    # @param filename [String] name of the tar archive inside +tmpdir+.
    def initialize(tmpdir:, filename:)
      @tmpdir = tmpdir
      @filename = filename
      @filepath = File.join(@tmpdir, @filename)
    end

    # Validates the paths, refuses linked archives, extracts the archive
    # contents into +tmpdir+ and returns +tmpdir+.
    def execute
      validate_tmpdir
      validate_filepath
      validate_symlink

      extract_archive

      tmpdir
    end

    private

    attr_reader :tmpdir, :filename, :filepath

    # Rejects any tmpdir outside of the system temp directory.
    def validate_tmpdir
      Gitlab::PathTraversal.check_allowed_absolute_path!(tmpdir, [Dir.tmpdir])
    end

    # Guards against `..`-style traversal in the archive path.
    def validate_filepath
      Gitlab::PathTraversal.check_path_traversal!(filepath)
    end

    # Refuses archives that are symlinks or share hard links.
    def validate_symlink
      raise(BulkImports::Error, 'Invalid file') if Gitlab::Utils::FileInfo.linked?(filepath)
    end

    # Runs `tar -xf` on the archive, extracting into tmpdir.
    def extract_archive
      untar_xf(archive: filepath, dir: tmpdir)
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::ArchiveExtractionService, feature_category: :importers do
let_it_be(:tmpdir) { Dir.mktmpdir }
let_it_be(:filename) { 'symlink_export.tar' }
let_it_be(:filepath) { File.join(tmpdir, filename) }
# Copy the fixture archive (which contains a symlink) into the tmpdir.
before do
FileUtils.copy_file(File.join('spec', 'fixtures', filename), filepath)
end
after(:all) do
FileUtils.remove_entry(tmpdir)
end
subject(:service) { described_class.new(tmpdir: tmpdir, filename: filename) }
describe '#execute' do
it 'extracts files from archive and removes symlinks' do
file = File.join(tmpdir, 'project.json')
folder = File.join(tmpdir, 'uploads')
symlink = File.join(tmpdir, 'uploads', 'link.gitignore')
expect(service).to receive(:untar_xf).with(archive: filepath, dir: tmpdir).and_call_original
service.execute
expect(File.exist?(file)).to eq(true)
expect(Dir.exist?(folder)).to eq(true)
expect(File.exist?(symlink)).to eq(false)
end
context 'when dir is not in tmpdir' do
it 'raises an error' do
['/etc', '/usr', '/', '/home', '/some/other/path', Rails.root.to_s].each do |path|
expect { described_class.new(tmpdir: path, filename: 'filename').execute }
.to raise_error(StandardError, "path #{path} is not allowed")
end
end
end
context 'when archive file is a symlink' do
it 'raises an error' do
FileUtils.ln_s(filepath, File.join(tmpdir, 'symlink'))
expect { described_class.new(tmpdir: tmpdir, filename: 'symlink').execute }
.to raise_error(BulkImports::Error, 'Invalid file')
end
end
context 'when archive file shares multiple hard links' do
it 'raises an error' do
FileUtils.link(filepath, File.join(tmpdir, 'hard_link'))
expect { subject.execute }.to raise_error(BulkImports::Error, 'Invalid file')
end
end
context 'when filepath is being traversed' do
it 'raises an error' do
expect { described_class.new(tmpdir: File.join(Dir.mktmpdir, 'test', '..'), filename: 'name').execute }
.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path')
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module BulkImports
# Facade over the file-based export services (uploads, LFS, repository and
# design bundles): dispatches on the relation name, then tars the result.
class FileExportService
include Gitlab::ImportExport::CommandLineUtil
# Relations that always export exactly one object (a repository bundle).
SINGLE_OBJECT_RELATIONS = [
FileTransfer::ProjectConfig::REPOSITORY_BUNDLE_RELATION,
FileTransfer::ProjectConfig::DESIGN_BUNDLE_RELATION
].freeze
def initialize(portable, export_path, relation, user)
@portable = portable
@export_path = export_path
@relation = relation
@user = user # not used anywhere in this class at the moment
end
# Runs the relation-specific export, then archives everything under
# export_path into "<relation>.tar".
def execute(options = {})
export_service.execute(options)
archive_exported_data
end
def export_batch(ids)
execute(batch_ids: ids)
end
def exported_filename
"#{relation}.tar"
end
# 1 for bundle relations; otherwise delegate to the underlying service.
def exported_objects_count
case relation
when *SINGLE_OBJECT_RELATIONS
1
else
export_service.exported_objects_count
end
end
private
attr_reader :export_path, :portable, :relation
# Memoized relation -> service dispatch; unknown relations raise.
def export_service
@export_service ||= case relation
when FileTransfer::BaseConfig::UPLOADS_RELATION
UploadsExportService.new(portable, export_path)
when FileTransfer::ProjectConfig::LFS_OBJECTS_RELATION
LfsObjectsExportService.new(portable, export_path)
when FileTransfer::ProjectConfig::REPOSITORY_BUNDLE_RELATION
RepositoryBundleExportService.new(portable.repository, export_path, relation)
when FileTransfer::ProjectConfig::DESIGN_BUNDLE_RELATION
RepositoryBundleExportService.new(portable.design_repository, export_path, relation)
else
raise BulkImports::Error, 'Unsupported relation export type'
end
end
def archive_exported_data
archive_file = File.join(export_path, exported_filename)
tar_cf(archive: archive_file, dir: export_path)
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'

RSpec.describe BulkImports::FileExportService, feature_category: :importers do
  let_it_be(:project) { create(:project) }

  # Maps each supported file relation to the service class expected to handle it.
  let(:relations) do
    {
      'uploads' => BulkImports::UploadsExportService,
      'lfs_objects' => BulkImports::LfsObjectsExportService,
      'repository' => BulkImports::RepositoryBundleExportService,
      'design' => BulkImports::RepositoryBundleExportService
    }
  end

  describe '#execute' do
    it 'executes export service and archives exported data for each file relation' do
      relations.each do |relation, klass|
        Dir.mktmpdir do |export_path|
          service = described_class.new(project, export_path, relation, nil)

          # Block param renamed so it no longer shadows the outer `service`.
          expect_next_instance_of(klass) do |relation_export_service|
            expect(relation_export_service).to receive(:execute)
          end
          expect(service).to receive(:tar_cf).with(archive: File.join(export_path, "#{relation}.tar"), dir: export_path)

          service.execute
        end
      end
    end

    context 'when unsupported relation is passed' do
      it 'raises an error' do
        service = described_class.new(project, nil, 'unsupported', nil)

        expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
      end
    end
  end

  describe '#execute_batch' do
    it 'calls execute with provided array of record ids' do
      relations.each do |relation, klass|
        Dir.mktmpdir do |export_path|
          service = described_class.new(project, export_path, relation, nil)

          # Block param renamed so it no longer shadows the outer `service`.
          expect_next_instance_of(klass) do |relation_export_service|
            expect(relation_export_service).to receive(:execute).with({ batch_ids: [1, 2, 3] })
          end

          service.export_batch([1, 2, 3])
        end
      end
    end
  end

  describe '#exported_filename' do
    it 'returns filename of the exported file' do
      service = described_class.new(project, nil, 'uploads', nil)

      expect(service.exported_filename).to eq('uploads.tar')
    end
  end

  describe '#exported_objects_count' do
    context 'when relation is a collection' do
      it 'returns a number of exported relations' do
        %w[uploads lfs_objects].each do |relation|
          service = described_class.new(project, nil, relation, nil)

          allow(service).to receive_message_chain(:export_service, :exported_objects_count).and_return(10)

          expect(service.exported_objects_count).to eq(10)
        end
      end
    end

    context 'when relation is a repository' do
      it 'returns 1' do
        %w[repository design].each do |relation|
          service = described_class.new(project, nil, relation, nil)

          expect(service.exported_objects_count).to eq(1)
        end
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Entry point of the BulkImport/Direct Transfer feature.
# This service receives a Gitlab Instance connection params
# and a list of groups or projects to be imported.
#
# Process topography:
#
# sync | async
# |
# User +--> P1 +----> Pn +---+
# | ^ | Enqueue new job
# | +-----+
#
# P1 (sync)
#
# - Create a BulkImport record
# - Create a BulkImport::Entity for each group or project (entities) to be imported
# - Enqueue a BulkImportWorker job (P2) to import the given entity
#
# Pn (async)
#
# - For each group to be imported (BulkImport::Entity.with_status(:created))
# - Import the group data
# - Create entities for each subgroup of the imported group
# - Create entities for each project of the imported group
# - Enqueue a BulkImportWorker job (Pn) to import the new entities
module BulkImports
class CreateService
ENTITY_TYPES_MAPPING = {
'group_entity' => 'groups',
'project_entity' => 'projects'
}.freeze
attr_reader :current_user, :params, :credentials
def initialize(current_user, params, credentials)
@current_user = current_user
@params = params
@credentials = credentials
end
def execute
validate!
bulk_import = create_bulk_import
Gitlab::Tracking.event(
self.class.name,
'create',
label: 'bulk_import_group',
extra: { source_equals_destination: source_equals_destination? }
)
BulkImportWorker.perform_async(bulk_import.id)
ServiceResponse.success(payload: bulk_import)
rescue ActiveRecord::RecordInvalid, BulkImports::Error, BulkImports::NetworkError => e
ServiceResponse.error(
message: e.message,
http_status: :unprocessable_entity
)
end
private
def validate!
client.validate_instance_version!
validate_setting_enabled!
client.validate_import_scopes!
end
def create_bulk_import
BulkImport.transaction do
bulk_import = BulkImport.create!(
user: current_user,
source_type: 'gitlab',
source_version: client.instance_version,
source_enterprise: client.instance_enterprise
)
bulk_import.create_configuration!(credentials.slice(:url, :access_token))
Array.wrap(params).each do |entity_params|
track_access_level(entity_params)
validate_destination_namespace(entity_params)
validate_destination_slug(entity_params[:destination_slug] || entity_params[:destination_name])
validate_destination_full_path(entity_params)
BulkImports::Entity.create!(
bulk_import: bulk_import,
source_type: entity_params[:source_type],
source_full_path: entity_params[:source_full_path],
destination_slug: entity_params[:destination_slug] || entity_params[:destination_name],
destination_namespace: entity_params[:destination_namespace],
migrate_projects: Gitlab::Utils.to_boolean(entity_params[:migrate_projects], default: true)
)
end
bulk_import
end
end
def validate_setting_enabled!
source_full_path, source_type = Array.wrap(params)[0].values_at(:source_full_path, :source_type)
entity_type = ENTITY_TYPES_MAPPING.fetch(source_type)
if /^[0-9]+$/.match?(source_full_path)
query = query_type(entity_type)
response = graphql_client.execute(
graphql_client.parse(query.to_s),
{ full_path: source_full_path }
).original_hash
source_entity_identifier = ::GlobalID.parse(response.dig(*query.data_path, 'id')).model_id
else
source_entity_identifier = ERB::Util.url_encode(source_full_path)
end
client.get("/#{entity_type}/#{source_entity_identifier}/export_relations/status")
end
def track_access_level(entity_params)
Gitlab::Tracking.event(
self.class.name,
'create',
label: 'import_access_level',
user: current_user,
extra: { user_role: user_role(entity_params[:destination_namespace]), import_type: 'bulk_import_group' }
)
end
def source_equals_destination?
credentials[:url].starts_with?(Settings.gitlab.base_url)
end
def validate_destination_namespace(entity_params)
destination_namespace = entity_params[:destination_namespace]
source_type = entity_params[:source_type]
return if destination_namespace.blank?
group = Group.find_by_full_path(destination_namespace)
if group.nil? ||
(source_type == 'group_entity' && !current_user.can?(:create_subgroup, group)) ||
(source_type == 'project_entity' && !current_user.can?(:import_projects, group))
raise BulkImports::Error.destination_namespace_validation_failure(destination_namespace)
end
end
def validate_destination_slug(destination_slug)
return if Gitlab::Regex.oci_repository_path_regex.match?(destination_slug)
raise BulkImports::Error.destination_slug_validation_failure
end
def validate_destination_full_path(entity_params)
source_type = entity_params[:source_type]
full_path = [
entity_params[:destination_namespace],
entity_params[:destination_slug] || entity_params[:destination_name]
].reject(&:blank?).join('/')
case source_type
when 'group_entity'
return if Namespace.find_by_full_path(full_path).nil?
when 'project_entity'
return if Project.find_by_full_path(full_path).nil?
end
raise BulkImports::Error.destination_full_path_validation_failure(full_path)
end
def user_role(destination_namespace)
namespace = Namespace.find_by_full_path(destination_namespace)
# if there is no parent namespace we assume user will be group creator/owner
return owner_role unless destination_namespace
return owner_role unless namespace
return owner_role unless namespace.group_namespace? # user namespace
membership = current_user.group_members.find_by(source_id: namespace.id) # rubocop:disable CodeReuse/ActiveRecord
return 'Not a member' unless membership
Gitlab::Access.human_access(membership.access_level)
end
# Human-readable label for the Owner access level.
def owner_role
  Gitlab::Access.human_access(Gitlab::Access::OWNER)
end
# Memoized REST client for the source instance, authenticated with the
# personal access token from the supplied credentials.
def client
  @client ||= BulkImports::Clients::HTTP.new(
    url: @credentials[:url],
    token: @credentials[:access_token]
  )
end
# Memoized GraphQL client for the source instance, using the same
# credentials as the REST client.
def graphql_client
  @graphql_client ||= BulkImports::Clients::Graphql.new(
    url: @credentials[:url],
    token: @credentials[:access_token]
  )
end
# Builds the GraphQL query object matching the importable type:
# the group query for 'groups', the project query otherwise.
def query_type(entity_type)
  case entity_type
  when 'groups'
    BulkImports::Groups::Graphql::GetGroupQuery.new(context: nil)
  else
    BulkImports::Projects::Graphql::GetProjectQuery.new(context: nil)
  end
end
end
end
``` | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe BulkImports::CreateService, feature_category: :importers do
let(:user) { create(:user) }
let(:credentials) { { url: 'http://gitlab.example', access_token: 'token' } }
let(:destination_group) { create(:group, path: 'destination1') }
let(:migrate_projects) { true }
let_it_be(:parent_group) { create(:group, path: 'parent-group') }
# note: destination_name and destination_slug are currently interchangable so we need to test for both possibilities
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destination-group-1',
destination_namespace: 'parent-group',
migrate_projects: migrate_projects
},
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group2',
destination_name: 'destination-group-2',
destination_namespace: 'parent-group',
migrate_projects: migrate_projects
},
{
source_type: 'project_entity',
source_full_path: 'full/path/to/project1',
destination_slug: 'destination-project-1',
destination_namespace: 'parent-group',
migrate_projects: migrate_projects
}
]
end
let(:source_entity_identifier) { ERB::Util.url_encode(params[0][:source_full_path]) }
let(:source_entity_type) { BulkImports::CreateService::ENTITY_TYPES_MAPPING.fetch(params[0][:source_type]) }
subject { described_class.new(user, params, credentials) }
describe '#execute' do
context 'when gitlab version is 15.5 or higher' do
let(:source_version) { { version: "15.6.0", enterprise: false } }
context 'when a BulkImports::Error is raised while validating the instance version' do
before do
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
allow(client)
.to receive(:validate_instance_version!)
.and_raise(BulkImports::Error, "This is a BulkImports error.")
end
end
it 'rescues the error and raises a ServiceResponse::Error' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message).to eq("This is a BulkImports error.")
end
end
# response when authorize_admin_project in API endpoint fails
context 'when direct transfer status query returns a 403' do
it 'raises a ServiceResponse::Error' do
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
expect(client).to receive(:validate_instance_version!).and_return(true)
expect(client).to receive(:get)
.with("/groups/full%2Fpath%2Fto%2Fgroup1/export_relations/status")
.and_raise(BulkImports::NetworkError, '403 Forbidden')
end
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message).to eq("403 Forbidden")
end
end
context 'when direct transfer setting query returns a 404' do
it 'raises a ServiceResponse::Error' do
stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
.to_return(
status: 200,
body: source_version.to_json,
headers: { 'Content-Type' => 'application/json' }
)
stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=token")
.to_return(status: 404)
expect_next_instance_of(BulkImports::Clients::HTTP) do |client|
expect(client).to receive(:get).and_raise(BulkImports::Error.setting_not_enabled)
end
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq(
"Group import disabled on source or destination instance. " \
"Ask an administrator to enable it on both instances and try again."
)
end
end
context 'when required scopes are not present' do
it 'returns ServiceResponse with error if token does not have api scope' do
stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
.to_return(
status: 200,
body: source_version.to_json,
headers: { 'Content-Type' => 'application/json' }
)
stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=token")
.to_return(
status: 200
)
allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
allow(client).to receive(:validate_import_scopes!)
.and_raise(BulkImports::Error.scope_or_url_validation_failure)
end
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq(
"Check that the source instance base URL and the personal access token meet the necessary requirements."
)
end
end
context 'when token validation succeeds' do
before do
stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
.to_return(status: 200, body: source_version.to_json, headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=token")
.to_return(
status: 200
)
stub_request(:get, 'http://gitlab.example/api/v4/personal_access_tokens/self?private_token=token')
.to_return(
status: 200,
body: { 'scopes' => ['api'] }.to_json,
headers: { 'Content-Type' => 'application/json' }
)
parent_group.add_owner(user)
end
it 'creates bulk import' do
expect { subject.execute }.to change { BulkImport.count }.by(1)
last_bulk_import = BulkImport.last
expect(last_bulk_import.user).to eq(user)
expect(last_bulk_import.source_version).to eq(source_version[:version])
expect(last_bulk_import.user).to eq(user)
expect(last_bulk_import.source_enterprise).to eq(false)
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'bulk_import_group',
extra: { source_equals_destination: false }
)
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
)
end
context 'on the same instance' do
before do
allow(Settings.gitlab).to receive(:base_url).and_return('http://gitlab.example')
end
it 'tracks the same instance migration' do
expect { subject.execute }.to change { BulkImport.count }.by(1)
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'bulk_import_group',
extra: { source_equals_destination: true }
)
end
end
describe 'projects migration flag' do
let(:import) { BulkImport.last }
context 'when false' do
let(:migrate_projects) { false }
it 'sets false' do
subject.execute
expect(import.entities.pluck(:migrate_projects)).to contain_exactly(false, false, false)
end
end
context 'when true' do
let(:migrate_projects) { true }
it 'sets true' do
subject.execute
expect(import.entities.pluck(:migrate_projects)).to contain_exactly(true, true, true)
end
end
context 'when nil' do
let(:migrate_projects) { nil }
it 'sets true' do
subject.execute
expect(import.entities.pluck(:migrate_projects)).to contain_exactly(true, true, true)
end
end
end
end
end
context 'when gitlab version is lower than 15.5' do
let(:source_version) do
Gitlab::VersionInfo.new(
::BulkImport::MIN_MAJOR_VERSION,
::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT
)
end
before do
allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
allow(instance).to receive(:instance_version).and_return(source_version)
allow(instance).to receive(:instance_enterprise).and_return(false)
stub_request(:get, "http://gitlab.example/api/v4/#{source_entity_type}/#{source_entity_identifier}/export_relations/status?page=1&per_page=30&private_token=token")
.to_return(
status: 200
)
end
parent_group.add_owner(user)
end
it 'creates bulk import' do
expect { subject.execute }.to change { BulkImport.count }.by(1)
last_bulk_import = BulkImport.last
expect(last_bulk_import.user).to eq(user)
expect(last_bulk_import.source_version).to eq(source_version.to_s)
expect(last_bulk_import.user).to eq(user)
expect(last_bulk_import.source_enterprise).to eq(false)
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'bulk_import_group',
extra: { source_equals_destination: false }
)
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
)
end
context 'on the same instance' do
before do
allow(Settings.gitlab).to receive(:base_url).and_return('http://gitlab.example')
end
it 'tracks the same instance migration' do
expect { subject.execute }.to change { BulkImport.count }.by(1)
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'bulk_import_group',
extra: { source_equals_destination: true }
)
end
end
it 'creates bulk import entities' do
expect { subject.execute }.to change { BulkImports::Entity.count }.by(3)
end
it 'creates bulk import configuration' do
expect { subject.execute }.to change { BulkImports::Configuration.count }.by(1)
end
it 'enqueues BulkImportWorker' do
expect(BulkImportWorker).to receive(:perform_async)
subject.execute
end
it 'returns success ServiceResponse' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_success
end
it 'returns ServiceResponse with error if validation fails' do
params[0][:source_full_path] = nil
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message).to eq("Validation failed: Source full path can't be blank, " \
"Source full path must have a relative path structure with " \
"no HTTP protocol characters, or leading or trailing forward slashes. " \
"Path segments must not start or end with a special character, and " \
"must not contain consecutive special characters")
end
describe '#user-role' do
context 'when there is a parent_namespace and the user is a member' do
let(:group2) { create(:group, path: 'destination200', source_id: parent_group.id ) }
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destination200',
destination_namespace: 'parent-group'
}
]
end
it 'defines access_level from parent namespace membership' do
parent_group.add_guest(user)
subject.execute
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
extra: { user_role: 'Guest', import_type: 'bulk_import_group' }
)
end
end
it 'defines access_level as not a member' do
parent_group.members.delete_all
subject.execute
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
extra: { user_role: 'Not a member', import_type: 'bulk_import_group' }
)
end
context 'when there is a destination_namespace but no parent_namespace' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destination-group-1',
destination_namespace: 'destination1'
}
]
end
it 'defines access_level from destination_namespace' do
destination_group.add_developer(user)
subject.execute
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
extra: { user_role: 'Developer', import_type: 'bulk_import_group' }
)
end
end
context 'when there is no destination_namespace or parent_namespace' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destinationational-mcdestiny',
destination_namespace: 'destinational-mcdestiny'
}
]
end
it 'defines access_level as owner' do
subject.execute
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
)
end
end
end
describe '#validate_setting_enabled!' do
let(:entity_source_id) { 'gid://gitlab/Model/12345' }
let(:graphql_client) { instance_double(BulkImports::Clients::Graphql) }
let(:http_client) { instance_double(BulkImports::Clients::HTTP) }
let(:http_response) { double(code: 200, success?: true) } # rubocop:disable RSpec/VerifiedDoubles
before do
allow(BulkImports::Clients::HTTP).to receive(:new).and_return(http_client)
allow(BulkImports::Clients::Graphql).to receive(:new).and_return(graphql_client)
allow(http_client).to receive(:instance_version).and_return(status: 200)
allow(http_client).to receive(:instance_enterprise).and_return(false)
allow(http_client).to receive(:validate_instance_version!).and_return(source_version)
allow(http_client).to receive(:validate_import_scopes!).and_return(true)
end
context 'when the source_type is a group' do
context 'when the source_full_path contains only integer characters' do
let(:query_string) { BulkImports::Groups::Graphql::GetGroupQuery.new(context: nil).to_s }
let(:graphql_response) do
double(original_hash: { 'data' => { 'group' => { 'id' => entity_source_id } } }) # rubocop:disable RSpec/VerifiedDoubles
end
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: '67890',
destination_slug: 'destination-group-1',
destination_namespace: 'destination1'
}
]
end
before do
allow(graphql_client).to receive(:parse).with(query_string)
allow(graphql_client).to receive(:execute).and_return(graphql_response)
allow(http_client).to receive(:get)
.with("/groups/12345/export_relations/status")
.and_return(http_response)
stub_request(:get, "http://gitlab.example/api/v4/groups/12345/export_relations/status?page=1&per_page=30&private_token=token")
.to_return(status: 200, body: "", headers: {})
end
it 'makes a graphql request using the group full path and an http request with the correct id' do
expect(graphql_client).to receive(:parse).with(query_string)
expect(graphql_client).to receive(:execute).and_return(graphql_response)
expect(http_client).to receive(:get).with("/groups/12345/export_relations/status")
subject.execute
end
end
end
context 'when the source_type is a project' do
context 'when the source_full_path contains only integer characters' do
let(:query_string) { BulkImports::Projects::Graphql::GetProjectQuery.new(context: nil).to_s }
let(:graphql_response) do
double(original_hash: { 'data' => { 'project' => { 'id' => entity_source_id } } }) # rubocop:disable RSpec/VerifiedDoubles
end
let(:params) do
[
{
source_type: 'project_entity',
source_full_path: '67890',
destination_slug: 'destination-group-1',
destination_namespace: 'destination1'
}
]
end
before do
allow(graphql_client).to receive(:parse).with(query_string)
allow(graphql_client).to receive(:execute).and_return(graphql_response)
allow(http_client).to receive(:get)
.with("/projects/12345/export_relations/status")
.and_return(http_response)
stub_request(:get, "http://gitlab.example/api/v4/projects/12345/export_relations/status?page=1&per_page=30&private_token=token")
.to_return(status: 200, body: "", headers: {})
end
it 'makes a graphql request using the group full path and an http request with the correct id' do
expect(graphql_client).to receive(:parse).with(query_string)
expect(graphql_client).to receive(:execute).and_return(graphql_response)
expect(http_client).to receive(:get).with("/projects/12345/export_relations/status")
subject.execute
end
end
end
end
describe '#validate_destination_namespace' do
context 'when the destination_namespace does not exist' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/source',
destination_slug: 'destination-slug',
destination_namespace: 'destination-namespace',
migrate_projects: migrate_projects
}
]
end
it 'returns ServiceResponse with an error message' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq("Import failed. Destination 'destination-namespace' is invalid, " \
"or you don't have permission.")
end
end
context 'when the user does not have permission to create subgroups' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/source',
destination_slug: 'destination-slug',
destination_namespace: parent_group.path,
migrate_projects: migrate_projects
}
]
end
it 'returns ServiceResponse with an error message' do
parent_group.members.delete_all
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq("Import failed. Destination '#{parent_group.path}' is invalid, " \
"or you don't have permission.")
end
end
context 'when the user does not have permission to create projects' do
let(:params) do
[
{
source_type: 'project_entity',
source_full_path: 'full/path/to/source',
destination_slug: 'destination-slug',
destination_namespace: parent_group.path,
migrate_projects: migrate_projects
}
]
end
it 'returns ServiceResponse with an error message' do
parent_group.members.delete_all
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq("Import failed. Destination '#{parent_group.path}' is invalid, " \
"or you don't have permission.")
end
end
end
describe '#validate_destination_slug' do
context 'when the destination_slug is invalid' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/source',
destination_slug: 'destin-*-ation-slug',
destination_namespace: parent_group.path,
migrate_projects: migrate_projects
}
]
end
it 'returns ServiceResponse with an error message' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq(
"Import failed. Destination URL " \
"must not start or end with a special character and must " \
"not contain consecutive special characters."
)
end
end
end
describe '#validate_destination_full_path' do
context 'when the source_type is a group' do
context 'when the provided destination_slug already exists in the destination_namespace' do
let_it_be(:existing_subgroup) { create(:group, path: 'existing-subgroup', parent_id: parent_group.id ) }
let_it_be(:existing_subgroup_2) { create(:group, path: 'existing-subgroup_2', parent_id: parent_group.id ) }
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/source',
destination_slug: existing_subgroup.path,
destination_namespace: parent_group.path,
migrate_projects: migrate_projects
}
]
end
it 'returns ServiceResponse with an error message' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq(
"Import failed. 'parent-group/existing-subgroup' already exists. " \
"Change the destination and try again."
)
end
end
context 'when the destination_slug conflicts with an existing top-level namespace' do
let_it_be(:existing_top_level_group) { create(:group, path: 'top-level-group') }
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/source',
destination_slug: existing_top_level_group.path,
destination_namespace: '',
migrate_projects: migrate_projects
}
]
end
it 'returns ServiceResponse with an error message' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq(
"Import failed. 'top-level-group' already exists. " \
"Change the destination and try again."
)
end
end
context 'when the destination_slug does not conflict with an existing top-level namespace' do
let(:params) do
[
{
source_type: 'group_entity',
source_full_path: 'full/path/to/source',
destination_slug: 'new-group',
destination_namespace: parent_group.path,
migrate_projects: migrate_projects
}
]
end
it 'returns success ServiceResponse' do
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_success
end
end
end
context 'when the source_type is a project' do
context 'when the provided destination_slug already exists in the destination_namespace' do
let_it_be(:existing_group) { create(:group, path: 'existing-group' ) }
let_it_be(:existing_project) { create(:project, path: 'existing-project', parent_id: existing_group.id ) }
let(:params) do
[
{
source_type: 'project_entity',
source_full_path: 'full/path/to/source',
destination_slug: existing_project.path,
destination_namespace: existing_group.path,
migrate_projects: migrate_projects
}
]
end
it 'returns ServiceResponse with an error message' do
existing_group.add_owner(user)
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_error
expect(result.message)
.to eq(
"Import failed. 'existing-group/existing-project' already exists. " \
"Change the destination and try again."
)
end
end
context 'when the destination_slug does not conflict with an existing project' do
let_it_be(:existing_group) { create(:group, path: 'existing-group' ) }
let(:params) do
[
{
source_type: 'project_entity',
source_full_path: 'full/path/to/source',
destination_slug: 'new-project',
destination_namespace: 'existing-group',
migrate_projects: migrate_projects
}
]
end
it 'returns success ServiceResponse' do
existing_group.add_owner(user)
result = subject.execute
expect(result).to be_a(ServiceResponse)
expect(result).to be_success
end
end
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module BulkImports
  # Exports a single relation of a portable (group or project) into a
  # gzipped file under a temporary export path and attaches the result to
  # the portable's BulkImports export record as an upload.
  class RelationExportService
    include Gitlab::ImportExport::CommandLineUtil

    # A finished, non-batched export updated within this window is reused
    # as-is instead of being regenerated.
    EXISTING_EXPORT_TTL = 3.minutes

    # @param user [User] user performing the export
    # @param portable [Group, Project] object whose relation is exported
    # @param relation [String] relation name, e.g. 'labels' or 'uploads'
    # @param jid [String] job id recorded on the export record
    def initialize(user, portable, relation, jid)
      @user = user
      @portable = portable
      @relation = relation
      @jid = jid
      @config = FileTransfer.config_for(portable)
    end

    # Finds or creates the export record; unless a fresh finished export
    # exists, regenerates the relation file, compresses it and uploads it.
    # Always removes the temporary export directory afterwards.
    def execute
      find_or_create_export! do |export|
        export.remove_existing_upload!
        export_service.execute
        ensure_export_file_exists!
        compress_exported_relation
        upload_compressed_file(export)
      end
    ensure
      FileUtils.remove_entry(export_path)
    end

    private

    attr_reader :user, :portable, :relation, :jid, :config

    delegate :export_path, to: :config

    # Yields the export record for (re)generation unless a recently
    # finished, non-batched export can be reused. Marks the export as
    # started before the block and finished after it.
    def find_or_create_export!
      export = portable.bulk_import_exports.safe_find_or_create_by!(relation: relation)

      return export if export.finished? && export.updated_at > EXISTING_EXPORT_TTL.ago && !export.batched?

      start_export!(export)
      yield export
      finish_export!(export)
    end

    # Picks the concrete exporter: tree relations (and the portable
    # itself) go through the tree exporter; file relations through the
    # file exporter. Anything else is unsupported.
    def export_service
      @export_service ||= if config.tree_relation?(relation) || config.self_relation?(relation)
                            TreeExportService.new(portable, export_path, relation, user)
                          elsif config.file_relation?(relation)
                            FileExportService.new(portable, export_path, relation, user)
                          else
                            raise BulkImports::Error, 'Unsupported export relation'
                          end
    end

    # Attaches the gzipped relation file to the export's upload record,
    # creating the record if it does not exist yet.
    def upload_compressed_file(export)
      compressed_file = File.join(export_path, "#{export_service.exported_filename}.gz")

      upload = ExportUpload.find_or_initialize_by(export_id: export.id) # rubocop: disable CodeReuse/ActiveRecord

      File.open(compressed_file) { |file| upload.export_file = file }

      upload.save!
    end

    def compress_exported_relation
      gzip(dir: export_path, filename: export_service.exported_filename)
    end

    # Marks the export as started, resets batching state and any previous
    # error, and drops batches left over from an earlier batched run.
    def start_export!(export)
      export.update!(
        status_event: 'start',
        jid: jid,
        batched: false,
        batches_count: 0,
        total_objects_count: 0,
        error: nil
      )

      export.batches.destroy_all if export.batches.any? # rubocop:disable Cop/DestroyAll
    end

    def finish_export!(export)
      export.update!(status_event: 'finish', batched: false, error: nil)
    end

    def exported_filepath
      File.join(export_path, export_service.exported_filename)
    end

    # Create empty file on disk
    # if relation is empty and nothing was exported
    def ensure_export_file_exists!
      FileUtils.touch(exported_filepath)
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

RSpec.describe BulkImports::RelationExportService, feature_category: :importers do
  let_it_be(:jid) { 'jid' }
  let_it_be(:relation) { 'labels' }
  let_it_be(:user) { create(:user) }
  let_it_be(:group) { create(:group) }
  let_it_be(:project) { create(:project) }
  let_it_be(:label) { create(:group_label, group: group) }
  let_it_be(:export_path) { "#{Dir.tmpdir}/relation_export_service_spec/tree" }
  let_it_be_with_reload(:export) { create(:bulk_import_export, group: group, relation: relation) }

  before do
    FileUtils.mkdir_p(export_path)

    group.add_owner(user)
    project.add_maintainer(user)

    # Redirect the service's working directory to the spec-owned tmp path.
    allow(subject).to receive(:export_path).and_return(export_path)
  end

  after :all do
    FileUtils.rm_rf(export_path)
  end

  subject { described_class.new(user, group, relation, jid) }

  describe '#execute' do
    it 'exports specified relation and marks export as finished' do
      expect_next_instance_of(BulkImports::TreeExportService) do |service|
        expect(service).to receive(:execute).and_call_original
      end

      subject.execute

      expect(export.reload.upload.export_file).to be_present
      expect(export.finished?).to eq(true)
      expect(export.batched?).to eq(false)
      expect(export.batches_count).to eq(0)
      expect(export.batches.count).to eq(0)
      expect(export.total_objects_count).to eq(0)
    end

    it 'removes temp export files' do
      subject.execute

      expect(Dir.exist?(export_path)).to eq(false)
    end

    # NOTE(review): duplicate example description — same name as the first
    # example above; consider renaming to describe the upload expectation.
    it 'exports specified relation and marks export as finished' do
      subject.execute

      expect(export.upload.export_file).to be_present
    end

    context 'when relation is empty and there is nothing to export' do
      let(:relation) { 'milestones' }

      it 'creates empty file on disk' do
        expect(FileUtils).to receive(:touch).with("#{export_path}/#{relation}.ndjson").and_call_original

        subject.execute
      end
    end

    context 'when exporting a file relation' do
      it 'uses file export service' do
        service = described_class.new(user, project, 'uploads', jid)

        # NOTE(review): the block parameter shadows the outer `service` local.
        expect_next_instance_of(BulkImports::FileExportService) do |service|
          expect(service).to receive(:execute)
        end

        service.execute
      end
    end

    context 'when export record does not exist' do
      let(:another_group) { create(:group) }

      subject { described_class.new(user, another_group, relation, jid) }

      it 'creates export record' do
        another_group.add_owner(user)

        expect { subject.execute }
          .to change { another_group.bulk_import_exports.count }
          .from(0)
          .to(1)
      end
    end

    context 'when there is existing export present' do
      let(:upload) { create(:bulk_import_export_upload, export: export) }

      it 'removes existing export before exporting' do
        upload.update!(export_file: fixture_file_upload('spec/fixtures/bulk_imports/gz/labels.ndjson.gz'))

        # NOTE(review): `expect_any_instance_of` does not take a block this
        # way; this expectation is likely a no-op and the example may pass
        # vacuously — verify against rspec-mocks docs.
        expect_any_instance_of(BulkImports::ExportUpload) do |upload|
          expect(upload).to receive(:remove_export_file!)
        end

        subject.execute
      end

      context 'when export is recently finished' do
        it 'returns recently finished export instead of re-exporting' do
          updated_at = 5.seconds.ago
          export.update!(status: 1, updated_at: updated_at)

          expect { subject.execute }.not_to change { export.updated_at }

          expect(export.status).to eq(1)
          expect(export.updated_at).to eq(updated_at)
        end
      end
    end

    context 'when export was batched' do
      let(:relation) { 'milestones' }
      let(:export) { create(:bulk_import_export, group: group, relation: relation, batched: true, batches_count: 2) }

      it 'removes existing batches and marks export as not batched' do
        create(:bulk_import_export_batch, batch_number: 1, export: export)
        create(:bulk_import_export_batch, batch_number: 2, export: export)

        expect { described_class.new(user, group, relation, jid).execute }
          .to change { export.reload.batches.count }
          .from(2)
          .to(0)

        expect(export.batched?).to eq(false)
        expect(export.batches_count).to eq(0)
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module BulkImports
  # Fetches groups importable from a remote GitLab instance together with
  # version-compatibility metadata for the migration features.
  class GetImportableDataService
    # @param params [Hash] pagination params (:per_page, :page)
    # @param query_params [Hash] query string params forwarded to /groups
    # @param credentials [Hash] :url and :access_token of the source instance
    def initialize(params, query_params, credentials)
      @params = params
      @query_params = query_params
      @credentials = credentials
    end

    # @return [Hash] :version_validation metadata plus the raw :response
    #   from the source instance's groups endpoint
    def execute
      { version_validation: version_validation, response: importables }
    end

    private

    # Raw response of the source instance's groups listing.
    def importables
      client.get('groups', @query_params)
    end

    # Feature availability and version info reported for the source.
    def version_validation
      project_migration = {
        available: client.compatible_for_project_migration?,
        min_version: BulkImport.min_gl_version_for_project_migration.to_s
      }

      {
        features: {
          project_migration: project_migration,
          source_instance_version: client.instance_version.to_s
        }
      }
    end

    # Memoized, paginated HTTP client for the source instance.
    def client
      @client ||= BulkImports::Clients::HTTP.new(
        url: @credentials[:url],
        token: @credentials[:access_token],
        per_page: @params[:per_page],
        page: @params[:page]
      )
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

RSpec.describe BulkImports::GetImportableDataService, feature_category: :importers do
  describe '#execute' do
    # Shared context stubbing HTTP requests against the given source URL.
    include_context 'bulk imports requests context', 'https://gitlab.example.com'

    let_it_be(:params) { { per_page: 20, page: 1 } }
    let_it_be(:query_params) { { top_level_only: true, min_access_level: 50, search: '' } }
    let_it_be(:credentials) { { url: 'https://gitlab.example.com', access_token: 'demo-pat' } }

    # Expected feature/version metadata for a source running the minimum
    # version that supports project migration.
    let_it_be(:expected_version_validation) do
      {
        features: {
          project_migration: {
            available: true,
            min_version: BulkImport.min_gl_version_for_project_migration.to_s
          },
          'source_instance_version': BulkImport.min_gl_version_for_project_migration.to_s
        }
      }
    end

    # Group payload returned by the stubbed /groups endpoint
    # (defined in the shared context above).
    let_it_be(:expected_parsed_response) do
      [
        {
          'id' => 2595438,
          'web_url' => 'https://gitlab.com/groups/auto-breakfast',
          'name' => 'Stub',
          'path' => 'stub-group',
          'full_name' => 'Stub',
          'full_path' => 'stub-group'
        }
      ]
    end

    let(:source_version) do
      Gitlab::VersionInfo.new(
        ::BulkImport::MIN_MAJOR_VERSION,
        ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT
      )
    end

    before do
      allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
        allow(instance).to receive(:instance_version).and_return(source_version)
        allow(instance).to receive(:instance_enterprise).and_return(false)
      end
    end

    subject do
      described_class.new(params, query_params, credentials).execute
    end

    it 'returns version_validation and a response' do
      expect(subject[:version_validation]).to eq(expected_version_validation)
      expect(subject[:response].parsed_response).to eq(expected_parsed_response)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# File Decompression Service allows gzipped files decompression into tmp directory.
#
# @param tmpdir [String] Temp directory to store downloaded file to. Must be located under `Dir.tmpdir`.
# @param filename [String] Name of the file to decompress.
module BulkImports
  class FileDecompressionService
    include Gitlab::ImportExport::CommandLineUtil

    ServiceError = Class.new(StandardError)

    def initialize(tmpdir:, filename:)
      @tmpdir = tmpdir
      @filename = filename
      @filepath = File.join(@tmpdir, @filename)
      # "labels.ndjson.gz" decompresses to "labels.ndjson" in the same dir.
      @decompressed_filename = File.basename(@filename, '.gz')
      @decompressed_filepath = File.join(@tmpdir, @decompressed_filename)
    end

    # Validates paths and the archive, decompresses it in place, and
    # returns the path of the original archive. On any error both the
    # archive and any partially decompressed file are deleted before the
    # error is re-raised.
    def execute
      validate_tmpdir
      validate_filepath
      validate_decompressed_file_size if Feature.enabled?(:validate_import_decompressed_archive_size)
      validate_symlink(filepath)

      decompress_file

      # Re-checked after decompression in case the extracted file is a link.
      validate_symlink(decompressed_filepath)

      filepath
    rescue StandardError => e
      File.delete(filepath) if File.exist?(filepath)
      File.delete(decompressed_filepath) if File.exist?(decompressed_filepath)

      raise e
    end

    private

    attr_reader :tmpdir, :filename, :filepath, :decompressed_filename, :decompressed_filepath

    # Rejects paths attempting directory traversal.
    def validate_filepath
      Gitlab::PathTraversal.check_path_traversal!(filepath)
    end

    # The working directory must live under the system temp directory.
    def validate_tmpdir
      Gitlab::PathTraversal.check_allowed_absolute_path!(tmpdir, [Dir.tmpdir])
    end

    # Rejects archives failing the decompressed-size validation.
    def validate_decompressed_file_size
      raise(ServiceError, 'File decompression error') unless size_validator.valid?
    end

    # Rejects linked files (symlinks per Gitlab::Utils::FileInfo.linked?).
    def validate_symlink(filepath)
      raise(ServiceError, 'Invalid file') if Gitlab::Utils::FileInfo.linked?(filepath)
    end

    def decompress_file
      gunzip(dir: tmpdir, filename: filename)
    end

    def size_validator
      @size_validator ||= Gitlab::ImportExport::DecompressedArchiveSizeValidator.new(archive_path: filepath)
    end
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Specs for BulkImports::FileDecompressionService: successful decompression,
# feature-flagged size validation, tmpdir/path-traversal rejection, and
# symlink / hard-link rejection for both compressed and decompressed files.
RSpec.describe BulkImports::FileDecompressionService, feature_category: :importers do
  using RSpec::Parameterized::TableSyntax

  let_it_be(:tmpdir) { Dir.mktmpdir }
  let_it_be(:ndjson_filename) { 'labels.ndjson' }
  let_it_be(:ndjson_filepath) { File.join(tmpdir, ndjson_filename) }
  let_it_be(:gz_filename) { "#{ndjson_filename}.gz" }
  let_it_be(:gz_filepath) { "spec/fixtures/bulk_imports/gz/#{gz_filename}" }

  before do
    # Copy the fixture into tmpdir and remove any leftover decompressed file.
    FileUtils.copy_file(gz_filepath, File.join(tmpdir, gz_filename))
    FileUtils.remove_entry(ndjson_filepath) if File.exist?(ndjson_filepath)
  end

  after(:all) do
    FileUtils.remove_entry(tmpdir)
  end

  subject { described_class.new(tmpdir: tmpdir, filename: gz_filename) }

  describe '#execute' do
    it 'decompresses specified file' do
      subject.execute

      expect(File.exist?(File.join(tmpdir, ndjson_filename))).to eq(true)
      expect(File.open(ndjson_filepath, &:readline)).to include('title', 'description')
    end

    context 'when validate_import_decompressed_archive_size feature flag is enabled' do
      before do
        stub_feature_flags(validate_import_decompressed_archive_size: true)
      end

      it 'performs decompressed file size validation' do
        expect_next_instance_of(Gitlab::ImportExport::DecompressedArchiveSizeValidator) do |validator|
          expect(validator).to receive(:valid?).and_return(true)
        end

        subject.execute
      end
    end

    context 'when validate_import_decompressed_archive_size feature flag is disabled' do
      before do
        stub_feature_flags(validate_import_decompressed_archive_size: false)
      end

      it 'does not perform decompressed file size validation' do
        expect(Gitlab::ImportExport::DecompressedArchiveSizeValidator).not_to receive(:new)

        subject.execute
      end
    end

    context 'when dir is not in tmpdir' do
      subject { described_class.new(tmpdir: '/etc', filename: 'filename') }

      it 'raises an error' do
        expect { subject.execute }.to raise_error(StandardError, 'path /etc is not allowed')
      end
    end

    context 'when path is being traversed' do
      subject { described_class.new(tmpdir: File.join(Dir.mktmpdir, 'test', '..'), filename: 'filename') }

      it 'raises an error' do
        expect { subject.execute }.to raise_error(Gitlab::PathTraversal::PathTraversalAttackError, 'Invalid path')
      end
    end

    # Shared assertion: the service raises ServiceError and removes the file.
    shared_examples 'raises an error and removes the file' do |error_message:|
      specify do
        expect { subject.execute }
          .to raise_error(BulkImports::FileDecompressionService::ServiceError, error_message)
        expect(File).not_to exist(file)
      end
    end

    # Creates a symlink or hard link in place of the compressed input.
    shared_context 'when compressed file' do
      let_it_be(:file) { File.join(tmpdir, 'file.gz') }

      subject { described_class.new(tmpdir: tmpdir, filename: 'file.gz') }

      before do
        FileUtils.send(link_method, File.join(tmpdir, gz_filename), file)
      end
    end

    # Forces the service's decompressed path to point at a linked file.
    shared_context 'when decompressed file' do
      let_it_be(:file) { File.join(tmpdir, 'file.txt') }

      subject { described_class.new(tmpdir: tmpdir, filename: gz_filename) }

      before do
        original_file = File.join(tmpdir, 'original_file.txt')
        FileUtils.touch(original_file)
        FileUtils.send(link_method, original_file, file)

        subject.instance_variable_set(:@decompressed_filepath, file)
      end
    end

    context 'when compressed file is a symlink' do
      let(:link_method) { :symlink }

      include_context 'when compressed file'

      include_examples 'raises an error and removes the file', error_message: 'File decompression error'
    end

    context 'when compressed file shares multiple hard links' do
      let(:link_method) { :link }

      include_context 'when compressed file'

      include_examples 'raises an error and removes the file', error_message: 'File decompression error'
    end

    context 'when decompressed file is a symlink' do
      let(:link_method) { :symlink }

      include_context 'when decompressed file'

      include_examples 'raises an error and removes the file', error_message: 'Invalid file'
    end

    context 'when decompressed file shares multiple hard links' do
      let(:link_method) { :link }

      include_context 'when decompressed file'

      include_examples 'raises an error and removes the file', error_message: 'Invalid file'
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
module BulkImports
  # Exports a single relation of a portable (e.g. a project) to NDJSON via the
  # streaming serializer; the special 'self' relation is serialized as JSON.
  class TreeExportService
    include Gitlab::Utils::StrongMemoize

    delegate :exported_objects_count, to: :serializer

    # @param portable    the exportable object (its FileTransfer config is looked up)
    # @param export_path [String] directory the NDJSON writer targets
    # @param relation    [String] relation name to export (or the self relation)
    # @param user        current user passed to the serializer
    def initialize(portable, export_path, relation, user)
      @portable = portable
      @export_path = export_path
      @relation = relation
      @config = FileTransfer.config_for(portable)
      @user = user
    end

    # Serializes either the portable itself (self relation) or the configured
    # relation definition.
    def execute
      if self_relation?(relation)
        serializer.serialize_root(config.class::SELF_RELATION)
      else
        serializer.serialize_relation(relation_definition)
      end
    end

    # Serializes only the records with the given ids (batched export).
    def export_batch(ids)
      serializer.serialize_relation(relation_definition, batch_ids: Array.wrap(ids))
    end

    # 'self' exports produce .json; everything else produces .ndjson.
    def exported_filename
      "#{relation}.#{extension}"
    end

    private

    delegate :self_relation?, to: :config

    attr_reader :export_path, :portable, :relation, :config, :user

    # rubocop: disable CodeReuse/Serializer
    def serializer
      @serializer ||= ::Gitlab::ImportExport::Json::StreamingSerializer.new(
        portable,
        config.portable_tree,
        ::Gitlab::ImportExport::Json::NdjsonWriter.new(export_path),
        exportable_path: '',
        current_user: user
      )
    end
    # rubocop: enable CodeReuse/Serializer

    def extension
      return 'json' if self_relation?(relation)

      'ndjson'
    end

    # Raises when the relation is not declared in the portable's tree config.
    def relation_definition
      definition = config.tree_relation_definition_for(relation)

      raise BulkImports::Error, 'Unsupported relation export type' unless definition

      definition
    end
    strong_memoize_attr :relation_definition
  end
end
``` | # frozen_string_literal: true
require 'spec_helper'

# Specs for BulkImports::TreeExportService: relation serialization, the 'self'
# relation special case, unsupported relations, filenames, and batch export.
RSpec.describe BulkImports::TreeExportService, feature_category: :importers do
  let_it_be(:project) { create(:project) }
  let_it_be(:export_path) { Dir.mktmpdir }

  let(:relation) { 'issues' }

  subject(:service) { described_class.new(project, export_path, relation, project.owner) }

  describe '#execute' do
    it 'executes export service and archives exported data' do
      expect_next_instance_of(Gitlab::ImportExport::Json::StreamingSerializer) do |serializer|
        expect(serializer).to receive(:serialize_relation)
      end

      subject.execute
    end

    context 'when unsupported relation is passed' do
      it 'raises an error' do
        service = described_class.new(project, export_path, 'unsupported', project.owner)

        expect { service.execute }.to raise_error(BulkImports::Error, 'Unsupported relation export type')
      end
    end

    context 'when relation is self' do
      let(:relation) { 'self' }

      it 'executes export on portable itself' do
        expect_next_instance_of(Gitlab::ImportExport::Json::StreamingSerializer) do |serializer|
          expect(serializer).to receive(:serialize_root)
        end

        subject.execute
      end
    end
  end

  describe '#exported_filename' do
    it 'returns filename of the exported file' do
      expect(subject.exported_filename).to eq('issues.ndjson')
    end

    context 'when relation is self' do
      let(:relation) { 'self' }

      it 'returns filename of the exported file' do
        expect(subject.exported_filename).to eq('self.json')
      end
    end
  end

  describe '#export_batch' do
    it 'serializes relation with specified ids' do
      expect_next_instance_of(Gitlab::ImportExport::Json::StreamingSerializer) do |serializer|
        expect(serializer).to receive(:serialize_relation).with(anything, batch_ids: [1, 2, 3])
      end

      subject.export_batch([1, 2, 3])
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Applies a status change (closed, archived, visible, pinned, autoclosed, ...)
# to a topic on behalf of a user, inside a transaction: updates the topic row,
# adds a small-action moderator post, and adjusts read state for autoclose.
TopicStatusUpdater =
  Struct.new(:topic, :user) do
    # Applies `status` with the given enabled flag.
    # Returns the truthy result of #change when an update happened, or a
    # falsy value when the topic was already in the requested state.
    def update!(status, enabled, opts = {})
      status = Status.new(status, enabled)
      @topic_timer = topic.public_topic_timer

      updated = nil

      Topic.transaction do
        updated = change(status, opts)
        if updated
          # Capture the highest post number BEFORE the moderator post is added,
          # so update_read_state_for can pretend-read up to the new post.
          highest_post_number = topic.highest_post_number
          create_moderator_post_for(status, opts)
          update_read_state_for(status, highest_post_number)
        end
      end

      updated
    end

    private

    # Performs the actual state mutation. Returns false when the DB row was
    # already in the requested state (update_all touched 0 rows), so repeated
    # identical updates are no-ops.
    def change(status, opts = {})
      result = true

      if status.pinned? || status.pinned_globally?
        topic.update_pinned(status.enabled?, status.pinned_globally?, opts[:until])
      elsif status.autoclosed?
        # Autoclose toggles the `closed` column rather than an `autoclosed` one.
        rc = Topic.where(id: topic.id, closed: !status.enabled?).update_all(closed: status.enabled?)
        topic.closed = status.enabled?
        result = false if rc == 0
      else
        rc =
          Topic.where(:id => topic.id, status.name => !status.enabled).update_all(
            status.name => status.enabled?,
          )

        topic.public_send("#{status.name}=", status.enabled?)
        result = false if rc == 0
      end

      DiscourseEvent.trigger(:topic_closed, topic) if status.manually_closing_topic?

      if status.visible? && status.disabled?
        UserProfile.remove_featured_topic_from_all_profiles(topic)
      end

      if status.visible? && result
        # Visibility changes adjust category topic counts and user stats.
        topic.update_category_topic_count_by(status.enabled? ? 1 : -1)
        UserStatCountUpdater.public_send(
          status.enabled? ? :increment! : :decrement!,
          topic.first_post,
        )
      end

      if @topic_timer
        if status.manually_closing_topic? || status.closing_topic?
          topic.delete_topic_timer(TopicTimer.types[:close])
          topic.delete_topic_timer(TopicTimer.types[:silent_close])
        elsif status.manually_opening_topic? || status.opening_topic?
          topic.delete_topic_timer(TopicTimer.types[:open])
          topic.inherit_auto_close_from_category
        end
      end

      # remove featured topics if we close/archive/make them invisible. Previously we used
      # to run the whole featuring logic but that could be very slow and have concurrency
      # errors on large sites with many autocloses and topics being created.
      if (
           (status.enabled? && (status.autoclosed? || status.closed? || status.archived?)) ||
             (status.disabled? && status.visible?)
         )
        CategoryFeaturedTopic.where(topic_id: topic.id).delete_all
      end

      result
    end

    # Adds the small-action post announcing the status change, then reloads
    # the topic so highest_post_number reflects the new post.
    def create_moderator_post_for(status, opts)
      message = opts[:message]
      topic.add_moderator_post(user, message || message_for(status), options_for(status, opts))
      topic.reload
    end

    def update_read_state_for(status, old_highest_read)
      if status.autoclosed? && status.enabled?
        # let's pretend all the people that read up to the autoclose message
        # actually read the topic
        PostTiming.pretend_read(topic.id, old_highest_read, topic.highest_post_number)
      end
    end

    # Default message text; only autoclose gets a generated message here
    # (other statuses rely on the action_code rendering).
    def message_for(status)
      if status.autoclosed?
        locale_key = status.locale_key.dup
        locale_key << "_lastpost" if @topic_timer&.based_on_last_post
        message_for_autoclosed(locale_key)
      end
    end

    # Picks a days/hours/minutes variant of the autoclose message based on how
    # long the topic was open (or the timer duration for last-post timers).
    def message_for_autoclosed(locale_key)
      num_minutes =
        if @topic_timer&.based_on_last_post
          (@topic_timer.duration_minutes || 0).minutes.to_i
        elsif @topic_timer&.created_at
          Time.zone.now - @topic_timer.created_at
        else
          Time.zone.now - topic.created_at
        end

      # all of the results above are in seconds, this brings them
      # back to the actual minutes integer
      num_minutes = (num_minutes / 1.minute).round

      if num_minutes.minutes >= 2.days
        I18n.t("#{locale_key}_days", count: (num_minutes.minutes / 1.day).round)
      else
        num_hours = (num_minutes.minutes / 1.hour).round
        if num_hours >= 2
          I18n.t("#{locale_key}_hours", count: num_hours)
        else
          I18n.t("#{locale_key}_minutes", count: num_minutes)
        end
      end
    end

    # Moderator-post options: only re-opening a topic bumps it.
    def options_for(status, opts = {})
      {
        bump: status.opening_topic?,
        post_type: Post.types[:small_action],
        silent: opts[:silent],
        action_code: status.action_code,
      }
    end

    # Small value object pairing a status name with its enabled flag and
    # exposing predicates plus the derived action_code / locale_key.
    Status =
      Struct.new(:name, :enabled) do
        %w[pinned_globally pinned autoclosed closed visible archived].each do |status|
          define_method("#{status}?") { name == status }
        end

        def enabled?
          enabled
        end

        def disabled?
          !enabled?
        end

        # e.g. "closed.enabled"
        def action_code
          "#{name}.#{enabled? ? "enabled" : "disabled"}"
        end

        # e.g. "topic_statuses.closed_enabled"
        def locale_key
          "topic_statuses.#{action_code.tr(".", "_")}"
        end

        def opening_topic?
          (closed? || autoclosed?) && disabled?
        end

        def closing_topic?
          (closed? || autoclosed?) && enabled?
        end

        def manually_closing_topic?
          closed? && enabled?
        end

        def manually_opening_topic?
          closed? && disabled?
        end
      end
  end
``` | # encoding: UTF-8
# frozen_string_literal: true
# TODO - test pinning, create_moderator_post
# Specs for TopicStatusUpdater: autoclose messages and read state, the
# :topic_closed event, timer inheritance on re-open, and idempotency of
# repeated status updates.
RSpec.describe TopicStatusUpdater do
  fab!(:user)
  fab!(:admin)

  it "avoids notifying on automatically closed topics" do
    # TODO: TopicStatusUpdater should suppress message bus updates from the users it "pretends to read"
    post =
      PostCreator.create(
        user,
        raw: "this is a test post 123 this is a test post",
        title: "hello world title",
      )
    # TODO needed so counts sync up, PostCreator really should not give back out-of-date Topic
    post.topic.set_or_create_timer(TopicTimer.types[:close], "10")
    post.topic.reload

    TopicStatusUpdater.new(post.topic, admin).update!("autoclosed", true)

    expect(post.topic.posts.count).to eq(2)

    tu = TopicUser.find_by(user_id: user.id)
    expect(tu.last_read_post_number).to eq(2)
  end

  it "adds an autoclosed message" do
    topic = create_topic
    topic.set_or_create_timer(TopicTimer.types[:close], "10")

    TopicStatusUpdater.new(topic, admin).update!("autoclosed", true)

    last_post = topic.posts.last
    expect(last_post.post_type).to eq(Post.types[:small_action])
    expect(last_post.action_code).to eq("autoclosed.enabled")
    expect(last_post.raw).to eq(I18n.t("topic_statuses.autoclosed_enabled_minutes", count: 0))
  end

  it "triggers a DiscourseEvent on close" do
    topic = create_topic

    called = false
    updater = ->(_) { called = true }

    DiscourseEvent.on(:topic_closed, &updater)
    TopicStatusUpdater.new(topic, admin).update!("closed", true)
    DiscourseEvent.off(:topic_closed, &updater)

    expect(topic).to be_closed
    expect(called).to eq(true)
  end

  it "adds an autoclosed message based on last post" do
    topic = create_topic
    Fabricate(:post, topic: topic)

    topic.set_or_create_timer(
      TopicTimer.types[:close],
      nil,
      based_on_last_post: true,
      duration_minutes: 600,
    )

    TopicStatusUpdater.new(topic, admin).update!("autoclosed", true)

    last_post = topic.posts.last
    expect(last_post.post_type).to eq(Post.types[:small_action])
    expect(last_post.action_code).to eq("autoclosed.enabled")
    expect(last_post.raw).to eq(
      I18n.t("topic_statuses.autoclosed_enabled_lastpost_hours", count: 10),
    )
  end

  describe "opening the topic" do
    it "opens the topic and deletes the timer" do
      topic = create_topic
      topic.set_or_create_timer(TopicTimer.types[:open], 10.hours.from_now)

      TopicStatusUpdater.new(topic, admin).update!("closed", false)

      timer = TopicTimer.find_by(topic: topic)
      expect(timer).to eq(nil)
    end

    context "when the category has auto close settings" do
      let(:topic) { create_topic }
      let(:based_on_last_post) { false }

      before do
        # auto close after 3 days, topic was created a day ago
        topic.update(
          category:
            Fabricate(
              :category,
              auto_close_hours: 72,
              auto_close_based_on_last_post: based_on_last_post,
            ),
          created_at: 1.day.ago,
        )
      end

      it "inherits auto close from the topic category, based on the created_at date of the topic" do
        # close the topic manually, and set a timer to automatically open
        TopicStatusUpdater.new(topic, admin).update!("closed", true)
        topic.set_or_create_timer(TopicTimer.types[:open], 10.hours.from_now)

        # manually open the topic. it has been 1 days since creation so the
        # topic should auto-close 2 days from now, the original auto close time
        TopicStatusUpdater.new(topic, admin).update!("closed", false)

        timer = TopicTimer.find_by(topic: topic)
        expect(timer).not_to eq(nil)
        expect(timer.execute_at).to be_within_one_second_of(topic.created_at + 72.hours)
      end

      it "does not inherit auto close from the topic category if it has already been X hours since topic creation" do
        topic.category.update(auto_close_hours: 1)

        # close the topic manually, and set a timer to automatically open
        TopicStatusUpdater.new(topic, admin).update!("closed", true)
        topic.set_or_create_timer(TopicTimer.types[:open], 10.hours.from_now)

        # manually open the topic. it has been over a day since creation and
        # the auto close hours was 1 so a new timer should not be made
        TopicStatusUpdater.new(topic, admin).update!("closed", false)

        timer = TopicTimer.find_by(topic: topic)
        expect(timer).to eq(nil)
      end

      context "when category setting is based_on_last_post" do
        let(:based_on_last_post) { true }

        it "inherits auto close from the topic category, using the duration because the close is based_on_last_post" do
          # close the topic manually, and set a timer to automatically open
          TopicStatusUpdater.new(topic, admin).update!("closed", true)
          topic.set_or_create_timer(TopicTimer.types[:open], 10.hours.from_now)

          # manually open the topic. it should re open 3 days from now, NOT
          # 3 days from creation
          TopicStatusUpdater.new(topic, admin).update!("closed", false)

          timer = TopicTimer.find_by(topic: topic)
          expect(timer).not_to eq(nil)
          expect(timer.duration_minutes).to eq(72 * 60)
          expect(timer.execute_at).to be_within_one_second_of(Time.zone.now + 72.hours)
        end
      end
    end
  end

  describe "repeat actions" do
    # Each status update should be a no-op (returning false and adding no new
    # small-action post) when the topic is already in the requested state.
    shared_examples "an action that doesn't repeat" do
      it "does not perform the update twice" do
        topic = Fabricate(:topic, status_name => false)

        updated = TopicStatusUpdater.new(topic, admin).update!(status_name, true)
        expect(updated).to eq(true)
        expect(topic.public_send("#{status_name}?")).to eq(true)

        updated = TopicStatusUpdater.new(topic, admin).update!(status_name, true)
        expect(updated).to eq(false)
        expect(topic.posts.where(post_type: Post.types[:small_action]).count).to eq(1)

        updated = TopicStatusUpdater.new(topic, admin).update!(status_name, false)
        expect(updated).to eq(true)
        expect(topic.public_send("#{status_name}?")).to eq(false)

        updated = TopicStatusUpdater.new(topic, admin).update!(status_name, false)
        expect(updated).to eq(false)
        expect(topic.posts.where(post_type: Post.types[:small_action]).count).to eq(2)
      end
    end

    it_behaves_like "an action that doesn't repeat" do
      let(:status_name) { "closed" }
    end

    it_behaves_like "an action that doesn't repeat" do
      let(:status_name) { "visible" }
    end

    it_behaves_like "an action that doesn't repeat" do
      let(:status_name) { "archived" }
    end

    it "updates autoclosed" do
      topic = Fabricate(:topic)

      updated = TopicStatusUpdater.new(topic, admin).update!("autoclosed", true)
      expect(updated).to eq(true)
      expect(topic.closed?).to eq(true)

      updated = TopicStatusUpdater.new(topic, admin).update!("autoclosed", true)
      expect(updated).to eq(false)
      expect(topic.posts.where(post_type: Post.types[:small_action]).count).to eq(1)

      updated = TopicStatusUpdater.new(topic, admin).update!("autoclosed", false)
      expect(updated).to eq(true)
      expect(topic.closed?).to eq(false)

      updated = TopicStatusUpdater.new(topic, admin).update!("autoclosed", false)
      expect(updated).to eq(false)
      expect(topic.posts.where(post_type: Post.types[:small_action]).count).to eq(2)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Turns notifications into delayed :user_email background jobs, honoring user
# email preferences, staged/approval state, and plugin-registered filters.
class NotificationEmailer
  # Wraps one notification; each public method enqueues the matching email
  # type with either the default or the private-message delay.
  class EmailUser
    attr_reader :notification, :no_delay

    def initialize(notification, no_delay: false)
      @notification = notification
      @no_delay = no_delay
    end

    def group_mentioned
      enqueue :group_mentioned
    end

    def mentioned
      enqueue :user_mentioned
    end

    def posted
      enqueue :user_posted
    end

    def watching_category_or_tag
      enqueue :user_posted
    end

    def quoted
      enqueue :user_quoted
    end

    def replied
      enqueue :user_replied
    end

    def linked
      enqueue :user_linked
    end

    def watching_first_post
      enqueue :user_watching_first_post
    end

    def post_approved
      enqueue :post_approved
    end

    def private_message
      enqueue_private(:user_private_message)
    end

    def invited_to_private_message
      enqueue(:user_invited_to_private_message, private_delay)
    end

    def invited_to_topic
      enqueue(:user_invited_to_topic, private_delay)
    end

    # Builds the Jobs::UserEmail arguments. post_id is omitted for
    # :post_approved and when no positive post id is available.
    def self.notification_params(notification, type)
      post_id = (notification.data_hash[:original_post_id] || notification.post_id).to_i
      notification_type = Notification.types[notification.notification_type]

      hash = {
        type: type.to_s,
        user_id: notification.user_id,
        notification_id: notification.id,
        notification_data_hash: notification.data_hash,
        notification_type: notification_type.to_s,
      }

      hash[:post_id] = post_id if post_id > 0 && notification_type != :post_approved

      hash
    end

    private

    # Only regular posts and whispers generate emails.
    EMAILABLE_POST_TYPES ||= Set.new [Post.types[:regular], Post.types[:whisper]]

    def enqueue(type, delay = default_delay)
      # Skip entirely when the user never wants email.
      return if notification.user.user_option.email_level == UserOption.email_level_types[:never]

      perform_enqueue(type, delay)
    end

    def enqueue_private(type, delay = private_delay)
      if notification.user.user_option.nil?
        # this can happen if we roll back user creation really early
        # or delete user
        # bypass this pm
        return
      end

      if notification.user.user_option.email_messages_level == UserOption.email_level_types[:never]
        return
      end

      perform_enqueue(type, delay)
    end

    # Common guards: user must be active or staged, approved when the site
    # requires it, staged users never get linked/quoted emails, and the post
    # type must be emailable.
    def perform_enqueue(type, delay)
      user = notification.user
      return unless user.active? || user.staged?
      return if SiteSetting.must_approve_users? && !user.approved? && !user.staged?
      return if user.staged? && (type == :user_linked || type == :user_quoted)

      return unless EMAILABLE_POST_TYPES.include?(post_type)

      Jobs.enqueue_in(delay, :user_email, self.class.notification_params(notification, type))
    end

    def default_delay
      no_delay ? 0 : SiteSetting.email_time_window_mins.minutes
    end

    def private_delay
      no_delay ? 0 : SiteSetting.personal_email_time_window_seconds
    end

    # Post type from the notification data if present, otherwise from the
    # associated post.
    def post_type
      @post_type ||=
        begin
          type = notification.data_hash["original_post_type"] if notification.data_hash
          type ||= notification.post.try(:post_type)
          type
        end
    end
  end

  # Global toggle, used to suppress emails (e.g. in tests).
  def self.disable
    @disabled = true
  end

  def self.enable
    @disabled = false
  end

  # Dispatches the notification to the EmailUser method named after its
  # notification type. Any plugin filter returning false vetoes the email.
  def self.process_notification(notification, no_delay: false)
    return if @disabled

    email_user = EmailUser.new(notification, no_delay: no_delay)
    email_method = Notification.types[notification.notification_type]

    DiscoursePluginRegistry.email_notification_filters.each do |filter|
      return unless filter.call(notification)
    end

    email_user.public_send(email_method) if email_user.respond_to? email_method
  end
end
``` | # frozen_string_literal: true
# Specs for NotificationEmailer: per-type enqueueing (with and without delay),
# user-state guards, email-level preferences, translation coverage, and
# plugin-registered email filters.
RSpec.describe NotificationEmailer do
  before do
    freeze_time
    NotificationEmailer.enable
  end

  fab!(:topic)
  fab!(:post) { Fabricate(:post, topic: topic) }

  # something is off with fabricator
  def create_notification(type, user = nil)
    user ||= Fabricate(:user)
    Notification.create(
      data: "{\"a\": 1}",
      user: user,
      notification_type: Notification.types[type],
      topic: topic,
      post_number: post.post_number,
    )
  end

  # Base behavior shared by every notification type: enqueue with the right
  # delay, and the guards for inactive/staged/unapproved users and small actions.
  shared_examples "enqueue" do
    it "enqueues a job for the email" do
      expect_enqueued_with(
        job: :user_email,
        args: NotificationEmailer::EmailUser.notification_params(notification, type),
        at: no_delay ? Time.zone.now : Time.zone.now + delay,
      ) { NotificationEmailer.process_notification(notification, no_delay: no_delay) }
    end

    context "with an inactive user" do
      before { notification.user.active = false }

      it "doesn't enqueue a job" do
        expect_not_enqueued_with(job: :user_email, args: { type: type }) do
          NotificationEmailer.process_notification(notification, no_delay: no_delay)
        end
      end

      it "enqueues a job if the user is staged for non-linked and non-quoted types" do
        notification.user.staged = true

        if type == :user_linked || type == :user_quoted
          expect_not_enqueued_with(job: :user_email, args: { type: type }) do
            NotificationEmailer.process_notification(notification, no_delay: no_delay)
          end
        else
          expect_enqueued_with(
            job: :user_email,
            args: NotificationEmailer::EmailUser.notification_params(notification, type),
            at: no_delay ? Time.zone.now : Time.zone.now + delay,
          ) { NotificationEmailer.process_notification(notification, no_delay: no_delay) }
        end
      end

      it "enqueues a job if the user is staged even if site requires user approval for non-linked and non-quoted typed" do
        notification.user.staged = true
        SiteSetting.must_approve_users = true

        if type == :user_linked || type == :user_quoted
          expect_not_enqueued_with(job: :user_email, args: { type: type }) do
            NotificationEmailer.process_notification(notification, no_delay: no_delay)
          end
        else
          expect_enqueued_with(
            job: :user_email,
            args: NotificationEmailer::EmailUser.notification_params(notification, type),
            at: no_delay ? Time.zone.now : Time.zone.now + delay,
          ) { NotificationEmailer.process_notification(notification, no_delay: no_delay) }
        end
      end
    end

    context "with an active but unapproved user" do
      before do
        SiteSetting.must_approve_users = true
        notification.user.approved = false
        notification.user.active = true
      end

      it "doesn't enqueue a job" do
        expect_not_enqueued_with(job: :user_email, args: { type: type }) do
          NotificationEmailer.process_notification(notification, no_delay: no_delay)
        end
      end
    end

    context "with a small action" do
      it "doesn't enqueue a job" do
        Post.any_instance.expects(:post_type).returns(Post.types[:small_action])

        expect_not_enqueued_with(job: :user_email, args: { type: type }) do
          NotificationEmailer.process_notification(notification, no_delay: no_delay)
        end
      end
    end
  end

  # Adds the public email_level preference guard.
  shared_examples "enqueue_public" do
    include_examples "enqueue"

    it "doesn't enqueue a job if the user has mention emails disabled" do
      notification.user.user_option.update_columns(
        email_level: UserOption.email_level_types[:never],
      )

      expect_not_enqueued_with(job: :user_email, args: { type: type }) do
        NotificationEmailer.process_notification(notification, no_delay: no_delay)
      end
    end
  end

  # Adds the private-message email_messages_level preference guard.
  shared_examples "enqueue_private" do
    include_examples "enqueue"

    it "doesn't enqueue a job if the user has private message emails disabled" do
      notification.user.user_option.update_columns(
        email_messages_level: UserOption.email_level_types[:never],
      )

      expect_not_enqueued_with(job: :user_email, args: { type: type }) do
        NotificationEmailer.process_notification(notification)
      end
    end
  end

  # Run every type both with and without the no_delay flag.
  [true, false].each do |no_delay|
    context "with user_mentioned" do
      let(:no_delay) { no_delay }
      let(:type) { :user_mentioned }
      let(:delay) { SiteSetting.email_time_window_mins.minutes }
      let!(:notification) { create_notification(:mentioned) }

      include_examples "enqueue_public"

      it "enqueue a delayed job for users that are online" do
        notification.user.last_seen_at = 1.minute.ago

        expect_enqueued_with(
          job: :user_email,
          args: NotificationEmailer::EmailUser.notification_params(notification, type),
          at: Time.zone.now + delay,
        ) { NotificationEmailer.process_notification(notification) }
      end
    end

    context "with user_replied" do
      let(:no_delay) { no_delay }
      let(:type) { :user_replied }
      let(:delay) { SiteSetting.email_time_window_mins.minutes }
      let!(:notification) { create_notification(:replied) }

      include_examples "enqueue_public"
    end

    context "with user_quoted" do
      let(:no_delay) { no_delay }
      let(:type) { :user_quoted }
      let(:delay) { SiteSetting.email_time_window_mins.minutes }
      let!(:notification) { create_notification(:quoted) }

      include_examples "enqueue_public"
    end

    context "with user_linked" do
      let(:no_delay) { no_delay }
      let(:type) { :user_linked }
      let(:delay) { SiteSetting.email_time_window_mins.minutes }
      let!(:notification) { create_notification(:linked) }

      include_examples "enqueue_public"
    end

    context "with user_posted" do
      let(:no_delay) { no_delay }
      let(:type) { :user_posted }
      let(:delay) { SiteSetting.email_time_window_mins.minutes }
      let!(:notification) { create_notification(:posted) }

      include_examples "enqueue_public"
    end

    context "with user_watching_category_or_tag" do
      let(:no_delay) { no_delay }
      let(:type) { :user_posted }
      let(:delay) { SiteSetting.email_time_window_mins.minutes }
      let!(:notification) { create_notification(:watching_category_or_tag) }

      include_examples "enqueue_public"
    end

    context "with user_private_message" do
      let(:no_delay) { no_delay }
      let(:type) { :user_private_message }
      let(:delay) { SiteSetting.personal_email_time_window_seconds }
      let!(:notification) { create_notification(:private_message) }

      include_examples "enqueue_private"

      it "doesn't enqueue a job for a small action" do
        notification.data_hash["original_post_type"] = Post.types[:small_action]

        expect_not_enqueued_with(job: :user_email, args: { type: type }) do
          NotificationEmailer.process_notification(notification)
        end
      end
    end

    context "with user_invited_to_private_message" do
      let(:no_delay) { no_delay }
      let(:type) { :user_invited_to_private_message }
      let(:delay) { SiteSetting.personal_email_time_window_seconds }
      let!(:notification) { create_notification(:invited_to_private_message) }

      include_examples "enqueue_public"
    end

    context "with user_invited_to_topic" do
      let(:no_delay) { no_delay }
      let(:type) { :user_invited_to_topic }
      let(:delay) { SiteSetting.personal_email_time_window_seconds }
      let!(:notification) { create_notification(:invited_to_topic) }

      include_examples "enqueue_public"
    end

    context "when watching the first post" do
      let(:no_delay) { no_delay }
      let(:type) { :user_watching_first_post }
      let(:delay) { SiteSetting.email_time_window_mins.minutes }
      let!(:notification) { create_notification(:watching_first_post) }

      include_examples "enqueue_public"
    end

    context "with post_approved" do
      let(:no_delay) { no_delay }
      let(:type) { :post_approved }
      let(:delay) { SiteSetting.email_time_window_mins.minutes }
      let!(:notification) { create_notification(:post_approved) }

      include_examples "enqueue_public"
    end
  end

  it "has translations for each sendable notification type" do
    notification = create_notification(:mentioned)
    email_user = NotificationEmailer::EmailUser.new(notification, no_delay: true)
    subkeys = %w[title subject_template text_body_template]

    # some notification types need special handling
    replace_keys = {
      "post_approved" => ["post_approved"],
      "private_message" => ["user_posted"],
      "invited_to_private_message" => %w[
        user_invited_to_private_message_pm
        user_invited_to_private_message_pm_group
        user_invited_to_private_message_pm_staged
      ],
    }

    Notification.types.keys.each do |notification_type|
      if email_user.respond_to?(notification_type)
        type_keys = replace_keys[notification_type.to_s] || ["user_#{notification_type}"]

        type_keys.each do |type_key|
          subkeys.each do |subkey|
            key = "user_notifications.#{type_key}.#{subkey}"
            expect(I18n.exists?(key)).to eq(true), "missing translation: #{key}"
          end
        end
      end
    end
  end

  describe "with plugin-added email_notification_filters" do
    let!(:plugin) { Plugin::Instance.new }
    let!(:notification) { create_notification(:quoted) }
    let(:no_delay) { true }
    let(:type) { :user_quoted }

    after { DiscoursePluginRegistry.reset! }

    it "sends email when all filters return true" do
      plugin.register_email_notification_filter { |_| true }
      plugin.register_email_notification_filter { |_| true }

      expect_enqueued_with(job: :user_email, args: { type: type }) do
        NotificationEmailer.process_notification(notification, no_delay: no_delay)
      end
    end

    it "doesn't send email when all one filter returns false" do
      plugin.register_email_notification_filter { |_| true }
      plugin.register_email_notification_filter { |_| false }

      expect_not_enqueued_with(job: :user_email, args: { type: type }) do
        NotificationEmailer.process_notification(notification, no_delay: no_delay)
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class WordWatcher
REPLACEMENT_LETTER ||= CGI.unescape_html("■")
CACHE_VERSION ||= 3
def initialize(raw)
@raw = raw
end
@cache_enabled = true
def self.disable_cache
@cache_enabled = false
end
def self.cache_enabled?
@cache_enabled
end
def self.cache_key(action)
"watched-words-list:v#{CACHE_VERSION}:#{action}"
end
def self.clear_cache!
WatchedWord.actions.each { |action, _| Discourse.cache.delete(cache_key(action)) }
end
def self.words_for_action(action)
WatchedWord
.where(action: WatchedWord.actions[action.to_sym])
.limit(WatchedWord::MAX_WORDS_PER_ACTION)
.order(:id)
.pluck(:word, :replacement, :case_sensitive)
.to_h do |w, r, c|
[
word_to_regexp(w, match_word: false),
{ word: w, replacement: r, case_sensitive: c }.compact,
]
end
end
def self.words_for_action_exist?(action)
WatchedWord.where(action: WatchedWord.actions[action.to_sym]).exists?
end
def self.cached_words_for_action(action)
if cache_enabled?
Discourse
.cache
.fetch(cache_key(action), expires_in: 1.day) { words_for_action(action).presence }
else
words_for_action(action).presence
end
end
def self.regexps_for_action(action, engine: :ruby)
cached_words_for_action(action)&.to_h do |_, attrs|
[word_to_regexp(attrs[:word], engine: engine), attrs]
end
end
# This regexp is run in miniracer, and the client JS app
# Make sure it is compatible with major browsers when changing
# hint: non-chrome browsers do not support 'lookbehind'
def self.compiled_regexps_for_action(action, engine: :ruby, raise_errors: false)
words = cached_words_for_action(action)
return [] if words.blank?
words
.values
.group_by { |attrs| attrs[:case_sensitive] ? :case_sensitive : :case_insensitive }
.map do |group_key, attrs_list|
words = attrs_list.map { |attrs| attrs[:word] }
# Compile all watched words into a single regular expression
regexp =
words
.map do |word|
r = word_to_regexp(word, match_word: SiteSetting.watched_words_regular_expressions?)
begin
r if Regexp.new(r)
rescue RegexpError
raise if raise_errors
end
end
.select { |r| r.present? }
.join("|")
# Add word boundaries to the regexp for regular watched words
regexp =
match_word_regexp(
regexp,
engine: engine,
) if !SiteSetting.watched_words_regular_expressions?
# Add case insensitive flag if needed
Regexp.new(regexp, group_key == :case_sensitive ? nil : Regexp::IGNORECASE)
end
end
# Serializes the compiled regexps into plain hashes that can be shipped
# to the client: { source_string => { case_sensitive: Boolean } }.
def self.serialized_regexps_for_action(action, engine: :ruby)
  compiled_regexps_for_action(action, engine: engine).each_with_object([]) do |regexp, serialized|
    serialized << { regexp.source => { case_sensitive: !regexp.casefold? } }
  end
end
# Converts a watched word into a regexp source string (not a compiled
# Regexp).
#
# @param word [String] the watched word; treated as a raw regexp when the
#   watched_words_regular_expressions site setting is enabled
# @param engine [Symbol] :ruby or :js word-boundary flavor
# @param match_word [Boolean] wrap the pattern so it only matches whole words
def self.word_to_regexp(word, engine: :ruby, match_word: true)
  if SiteSetting.watched_words_regular_expressions?
    regexp = word
    regexp = "(#{regexp})" if match_word
    regexp
  else
    # Convert word to regex by escaping special characters in a regexp.
    # Avoid using Regexp.escape because it escapes more characters than
    # it should (for example, whitespaces, dashes, etc)
    regexp = word.gsub(/([.*+?^${}()|\[\]\\])/, '\\\\\1')

    # Convert wildcards to regexp
    regexp = regexp.gsub("\\*", '\S*')

    regexp = match_word_regexp(regexp, engine: engine) if match_word

    regexp
  end
end
# Censors watched words inside an HTML fragment, replacing matched words
# with REPLACEMENT_LETTER characters while leaving markup intact.
#
# @param html [String] cooked HTML
# @return [String] the HTML with censored text nodes
def self.censor(html)
  regexps = compiled_regexps_for_action(:censor)
  return html if regexps.blank?

  doc = Nokogiri::HTML5.fragment(html)
  # Only text nodes are rewritten, so tags and attributes are never touched.
  doc.traverse do |node|
    regexps.each do |regexp|
      node.content = censor_text_with_regexp(node.content, regexp) if node.text?
    end
  end

  doc.to_s
end
# Censors watched words in a plain-text string; returns the input
# unchanged when it is blank or no :censor words are configured.
def self.censor_text(text)
  return text if text.blank?

  compiled_regexps_for_action(:censor)
    .reduce(text) { |censored, regexp| censor_text_with_regexp(censored, regexp) }
end
# Applies :replace watched words to +text+; blank input is returned as-is.
def self.replace_text(text)
  text.blank? ? text : replace(text, :replace)
end
# Applies :link watched words to +text+; blank input is returned as-is.
def self.replace_link(text)
  text.blank? ? text : replace(text, :link)
end
# Runs every text-transforming watched-word action over +text+ in order:
# censor first, then replacements, then link replacements.
def self.apply_to_text(text)
  replace_link(replace_text(censor_text(text)))
end
# @return [MatchData, nil] the first :require_approval match in @raw, or nil
def requires_approval?
  word_matches_for_action?(:require_approval)
end
# @return [MatchData, nil] the first :flag match in @raw, or nil
def should_flag?
  word_matches_for_action?(:flag)
end
# @return [Array<String>, nil] all distinct :block words matched in @raw
#   (sorted, unique), or nil when none match
def should_block?
  word_matches_for_action?(:block, all_matches: true)
end
# @return [MatchData, nil] the first :silence match in @raw, or nil
def should_silence?
  word_matches_for_action?(:silence)
end
# Tests @raw against every compiled regexp for +action+.
#
# @param action [Symbol] WatchedWord action name
# @param all_matches [Boolean] when false, return the first MatchData found;
#   when true, collect every distinct matched word across all regexps
# @return [MatchData, Array<String>, nil] nil when nothing matches
def word_matches_for_action?(action, all_matches: false)
  regexps = self.class.compiled_regexps_for_action(action)
  return if regexps.blank?

  match_list = []
  regexps.each do |regexp|
    match = regexp.match(@raw)

    if !all_matches
      # Fast path: the first regexp that matches wins.
      return match if match
      next
    end

    next if !match

    if SiteSetting.watched_words_regular_expressions?
      # Regexp mode: String#scan yields capture-group arrays, so keep the
      # first present capture from each match, de-duplicated via a Set.
      set = Set.new
      @raw
        .scan(regexp)
        .each do |m|
          if Array === m
            set.add(m.find(&:present?))
          elsif String === m
            set.add(m)
          end
        end
      matches = set.to_a
    else
      matches = @raw.scan(regexp)
      matches.flatten!
    end

    match_list.concat(matches)
  end

  return if match_list.blank?

  match_list.compact!
  match_list.uniq!
  match_list.sort!
  match_list
end
# Checks whether a single watched word matches @raw.
#
# @param word [String] the watched word to test
# @param case_sensitive [Boolean] match case exactly when true
def word_matches?(word, case_sensitive: false)
  flags = case_sensitive ? nil : Regexp::IGNORECASE
  Regexp.new(WordWatcher.word_to_regexp(word), flags).match?(@raw)
end
# Replaces every match of +regexp+ in +text+ with +replacement+, preserving
# the leading non-word character that non-regular-expression word patterns
# capture as part of the match.
def self.replace_text_with_regexp(text, regexp, replacement)
  text.gsub(regexp) do |match|
    prefix = ""
    # match may be prefixed with a non-word character from the non-capturing group
    # Ensure this isn't replaced if watched words regular expression is disabled.
    if !SiteSetting.watched_words_regular_expressions? && (match[0] =~ /\W/) != nil
      prefix = "#{match[0]}"
    end

    "#{prefix}#{replacement}"
  end
end

private_class_method :replace_text_with_regexp
# Replaces each regexp match with REPLACEMENT_LETTER characters, keeping any
# leading whitespace that the word-boundary pattern swallowed into the match.
def self.censor_text_with_regexp(text, regexp)
  text.gsub(regexp) do |match|
    # the regex captures leading whitespaces
    padding = match.size - match.lstrip.size
    if padding > 0
      match[0..padding - 1] + REPLACEMENT_LETTER * (match.size - padding)
    else
      REPLACEMENT_LETTER * match.size
    end
  end
end

private_class_method :censor_text_with_regexp
# Wraps a regexp source string so it only matches a whole word: the word
# must be preceded by a non-word character (or start of string) and
# followed by one (or end of string). The :js flavor uses \P{L} so the
# pattern stays valid in browsers without lookbehind support.
def self.match_word_regexp(regexp, engine: :ruby)
  case engine
  when :js
    "(?:\\P{L}|^)(#{regexp})(?=\\P{L}|$)"
  when :ruby
    "(?:[^[:word:]]|^)(#{regexp})(?=[^[:word:]]|$)"
  else
    raise "unknown regexp engine: #{engine}"
  end
end
private_class_method :match_word_regexp
# Applies every replacement-style watched word of the given type (:replace
# or :link) to +text+, honoring each word's case-sensitivity flag.
def self.replace(text, watch_word_type)
  regexps_for_action(watch_word_type)
    .to_a
    .reduce(text) do |t, (word_regexp, attrs)|
      case_flag = attrs[:case_sensitive] ? nil : Regexp::IGNORECASE
      replace_text_with_regexp(t, Regexp.new(word_regexp, case_flag), attrs[:replacement])
    end
end

private_class_method :replace
end
``` | # frozen_string_literal: true
RSpec.describe WordWatcher do
let(:raw) { <<~RAW.strip }
Do you like liquorice?
I really like them. One could even say that I am *addicted* to liquorice. And if
you can mix it up with some anise, then I'm in heaven ;)
RAW
after { Discourse.redis.flushdb }
describe ".words_for_action" do
it "returns words with metadata including case sensitivity flag" do
Fabricate(:watched_word, action: WatchedWord.actions[:censor])
word1 = Fabricate(:watched_word, action: WatchedWord.actions[:block]).word
word2 =
Fabricate(:watched_word, action: WatchedWord.actions[:block], case_sensitive: true).word
expect(described_class.words_for_action(:block)).to include(
word1 => {
case_sensitive: false,
word: word1,
},
word2 => {
case_sensitive: true,
word: word2,
},
)
end
it "returns word with metadata including replacement if word has replacement" do
word =
Fabricate(
:watched_word,
action: WatchedWord.actions[:link],
replacement: "http://test.localhost/",
).word
expect(described_class.words_for_action(:link)).to include(
word => {
case_sensitive: false,
replacement: "http://test.localhost/",
word: word,
},
)
end
it "returns an empty hash when no words are present" do
expect(described_class.words_for_action(:tag)).to eq({})
end
end
describe ".compiled_regexps_for_action" do
let!(:word1) { Fabricate(:watched_word, action: WatchedWord.actions[:block]).word }
let!(:word2) { Fabricate(:watched_word, action: WatchedWord.actions[:block]).word }
let!(:word3) do
Fabricate(:watched_word, action: WatchedWord.actions[:block], case_sensitive: true).word
end
let!(:word4) do
Fabricate(:watched_word, action: WatchedWord.actions[:block], case_sensitive: true).word
end
context "when watched_words_regular_expressions = true" do
it "returns the proper regexp" do
SiteSetting.watched_words_regular_expressions = true
regexps = described_class.compiled_regexps_for_action(:block)
expect(regexps).to be_an(Array)
expect(regexps.map(&:inspect)).to contain_exactly(
"/(#{word1})|(#{word2})/i",
"/(#{word3})|(#{word4})/",
)
end
end
context "when watched_words_regular_expressions = false" do
it "returns the proper regexp" do
SiteSetting.watched_words_regular_expressions = false
regexps = described_class.compiled_regexps_for_action(:block)
expect(regexps).to be_an(Array)
expect(regexps.map(&:inspect)).to contain_exactly(
"/(?:[^[:word:]]|^)(#{word1}|#{word2})(?=[^[:word:]]|$)/i",
"/(?:[^[:word:]]|^)(#{word3}|#{word4})(?=[^[:word:]]|$)/",
)
end
it "is empty for an action without watched words" do
regexps = described_class.compiled_regexps_for_action(:censor)
expect(regexps).to be_an(Array)
expect(regexps).to be_empty
end
end
context "when regular expression is invalid" do
before do
SiteSetting.watched_words_regular_expressions = true
Fabricate(:watched_word, word: "Test[\S*", action: WatchedWord.actions[:block])
end
it "does not raise an exception by default" do
expect { described_class.compiled_regexps_for_action(:block) }.not_to raise_error
expect(described_class.compiled_regexps_for_action(:block)).to contain_exactly(
/(#{word1})|(#{word2})/i,
/(#{word3})|(#{word4})/,
)
end
it "raises an exception with raise_errors set to true" do
expect {
described_class.compiled_regexps_for_action(:block, raise_errors: true)
}.to raise_error(RegexpError)
end
end
end
describe "#word_matches_for_action?" do
it "is falsey when there are no watched words" do
expect(described_class.new(raw).word_matches_for_action?(:require_approval)).to be_falsey
end
context "with watched words" do
fab!(:anise) do
Fabricate(:watched_word, word: "anise", action: WatchedWord.actions[:require_approval])
end
it "is falsey without a match" do
expect(
described_class.new("No liquorice for me, thanks...").word_matches_for_action?(
:require_approval,
),
).to be_falsey
end
it "is returns matched words if there's a match" do
matches = described_class.new(raw).word_matches_for_action?(:require_approval)
expect(matches).to be_truthy
expect(matches[1]).to eq(anise.word)
end
it "finds at start of string" do
matches =
described_class.new("#{anise.word} is garbage").word_matches_for_action?(
:require_approval,
)
expect(matches[1]).to eq(anise.word)
end
it "finds at end of string" do
matches =
described_class.new("who likes #{anise.word}").word_matches_for_action?(:require_approval)
expect(matches[1]).to eq(anise.word)
end
it "finds non-letters in place of letters" do
Fabricate(:watched_word, word: "co(onut", action: WatchedWord.actions[:require_approval])
matches =
described_class.new("This co(onut is delicious.").word_matches_for_action?(
:require_approval,
)
expect(matches[1]).to eq("co(onut")
end
it "handles * for wildcards" do
Fabricate(:watched_word, word: "a**le*", action: WatchedWord.actions[:require_approval])
matches =
described_class.new("I acknowledge you.").word_matches_for_action?(:require_approval)
expect(matches[1]).to eq("acknowledge")
end
it "handles word boundary" do
Fabricate(:watched_word, word: "love", action: WatchedWord.actions[:require_approval])
expect(
described_class.new("I Love, bananas.").word_matches_for_action?(:require_approval)[1],
).to eq("Love")
expect(
described_class.new("I LOVE; apples.").word_matches_for_action?(:require_approval)[1],
).to eq("LOVE")
expect(
described_class.new("love: is a thing.").word_matches_for_action?(:require_approval)[1],
).to eq("love")
expect(
described_class.new("I love. oranges").word_matches_for_action?(:require_approval)[1],
).to eq("love")
expect(
described_class.new("I :love. pineapples").word_matches_for_action?(:require_approval)[1],
).to eq("love")
expect(
described_class.new("peace ,love and understanding.").word_matches_for_action?(
:require_approval,
)[
1
],
).to eq("love")
end
context "when there are multiple matches" do
context "with non regexp words" do
it "lists all matching words" do
%w[bananas hate hates].each do |word|
Fabricate(:watched_word, word: word, action: WatchedWord.actions[:block])
end
matches =
described_class.new("I hate bananas").word_matches_for_action?(
:block,
all_matches: true,
)
expect(matches).to contain_exactly("hate", "bananas")
matches =
described_class.new("She hates bananas too").word_matches_for_action?(
:block,
all_matches: true,
)
expect(matches).to contain_exactly("hates", "bananas")
end
end
context "with regexp words" do
before { SiteSetting.watched_words_regular_expressions = true }
it "lists all matching patterns" do
Fabricate(:watched_word, word: "(pine)?apples", action: WatchedWord.actions[:block])
Fabricate(
:watched_word,
word: "((move|store)(d)?)|((watch|listen)(ed|ing)?)",
action: WatchedWord.actions[:block],
)
matches =
described_class.new("pine pineapples apples").word_matches_for_action?(
:block,
all_matches: true,
)
expect(matches).to contain_exactly("pineapples", "apples")
matches =
described_class.new(
"go watched watch ed ing move d moveed moved moving",
).word_matches_for_action?(:block, all_matches: true)
expect(matches).to contain_exactly(*%w[watched watch move moved])
end
end
end
context "when word is an emoji" do
it "handles emoji" do
Fabricate(:watched_word, word: ":joy:", action: WatchedWord.actions[:require_approval])
matches =
described_class.new("Lots of emojis here :joy:").word_matches_for_action?(
:require_approval,
)
expect(matches[1]).to eq(":joy:")
end
it "handles unicode emoji" do
Fabricate(:watched_word, word: "🎃", action: WatchedWord.actions[:require_approval])
matches =
described_class.new("Halloween party! 🎃").word_matches_for_action?(:require_approval)
expect(matches[1]).to eq("🎃")
end
it "handles emoji skin tone" do
Fabricate(
:watched_word,
word: ":woman:t5:",
action: WatchedWord.actions[:require_approval],
)
matches =
described_class.new("To Infinity and beyond! 🚀 :woman:t5:").word_matches_for_action?(
:require_approval,
)
expect(matches[1]).to eq(":woman:t5:")
end
end
context "when word is a regular expression" do
before { SiteSetting.watched_words_regular_expressions = true }
it "supports regular expressions on word boundaries" do
Fabricate(:watched_word, word: /\btest\b/, action: WatchedWord.actions[:block])
matches = described_class.new("this is not a test.").word_matches_for_action?(:block)
expect(matches[0]).to eq("test")
end
it "supports regular expressions as a site setting" do
Fabricate(
:watched_word,
word: /tro[uo]+t/,
action: WatchedWord.actions[:require_approval],
)
matches =
described_class.new("Evil Trout is cool").word_matches_for_action?(:require_approval)
expect(matches[0]).to eq("Trout")
matches =
described_class.new("Evil Troot is cool").word_matches_for_action?(:require_approval)
expect(matches[0]).to eq("Troot")
matches = described_class.new("trooooooooot").word_matches_for_action?(:require_approval)
expect(matches[0]).to eq("trooooooooot")
end
it "support uppercase" do
Fabricate(:watched_word, word: /a\S+ce/, action: WatchedWord.actions[:require_approval])
matches = described_class.new("Amazing place").word_matches_for_action?(:require_approval)
expect(matches).to be_nil
matches =
described_class.new("Amazing applesauce").word_matches_for_action?(:require_approval)
expect(matches[0]).to eq("applesauce")
matches =
described_class.new("Amazing AppleSauce").word_matches_for_action?(:require_approval)
expect(matches[0]).to eq("AppleSauce")
end
end
context "when case sensitive words are present" do
before do
Fabricate(
:watched_word,
word: "Discourse",
action: WatchedWord.actions[:block],
case_sensitive: true,
)
end
context "when watched_words_regular_expressions = true" do
it "respects case sensitivity flag in matching words" do
SiteSetting.watched_words_regular_expressions = true
Fabricate(:watched_word, word: "p(rivate|ublic)", action: WatchedWord.actions[:block])
matches =
described_class.new(
"PUBLIC: Discourse is great for public discourse",
).word_matches_for_action?(:block, all_matches: true)
expect(matches).to contain_exactly("PUBLIC", "Discourse", "public")
end
end
context "when watched_words_regular_expressions = false" do
it "repects case sensitivity flag in matching" do
SiteSetting.watched_words_regular_expressions = false
Fabricate(:watched_word, word: "private", action: WatchedWord.actions[:block])
matches =
described_class.new(
"PRIVATE: Discourse is also great private discourse",
).word_matches_for_action?(:block, all_matches: true)
expect(matches).to contain_exactly("PRIVATE", "Discourse", "private")
end
end
end
end
end
describe "word replacement" do
fab!(:censored_word) do
Fabricate(:watched_word, word: "censored", action: WatchedWord.actions[:censor])
end
fab!(:replaced_word) do
Fabricate(
:watched_word,
word: "to replace",
replacement: "replaced",
action: WatchedWord.actions[:replace],
)
end
fab!(:link_word) do
Fabricate(
:watched_word,
word: "https://notdiscourse.org",
replacement: "https://discourse.org",
action: WatchedWord.actions[:link],
)
end
it "censors text" do
expect(described_class.censor_text("a censored word")).to eq(
"a #{described_class::REPLACEMENT_LETTER * 8} word",
)
end
it "replaces text" do
expect(described_class.replace_text("a word to replace meow")).to eq("a word replaced meow")
end
it "replaces links" do
expect(described_class.replace_link("please visit https://notdiscourse.org meow")).to eq(
"please visit https://discourse.org meow",
)
end
describe ".apply_to_text" do
it "replaces all types of words" do
text = "hello censored world to replace https://notdiscourse.org"
expected =
"hello #{described_class::REPLACEMENT_LETTER * 8} world replaced https://discourse.org"
expect(described_class.apply_to_text(text)).to eq(expected)
end
context "when watched_words_regular_expressions = true" do
it "replaces captured non-word prefix" do
SiteSetting.watched_words_regular_expressions = true
Fabricate(
:watched_word,
word: "\\Wplaceholder",
replacement: "replacement",
action: WatchedWord.actions[:replace],
)
text = "is \tplaceholder in https://notdiscourse.org"
expected = "is replacement in https://discourse.org"
expect(described_class.apply_to_text(text)).to eq(expected)
end
end
context "when watched_words_regular_expressions = false" do
it "maintains non-word character prefix" do
SiteSetting.watched_words_regular_expressions = false
text = "to replace and\thttps://notdiscourse.org"
expected = "replaced and\thttps://discourse.org"
expect(described_class.apply_to_text(text)).to eq(expected)
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Validates a requested username's format and reports whether it is
# available, optionally bypassing the reserved-username restriction.
class UsernameCheckerService
  def initialize(allow_reserved_username: false)
    @allow_reserved_username = allow_reserved_username
  end

  # @return [Hash, nil] nil for a blank username, { errors: [...] } for an
  #   invalid format, otherwise the hash from #check_username_availability
  def check_username(username, email)
    return if username.nil? || username.empty?

    validator = UsernameValidator.new(username)
    return { errors: validator.errors } unless validator.valid_format?

    check_username_availability(username, email)
  end

  # @return [Hash] { available: true, is_developer: } when the name is free,
  #   or { available: false, suggestion: } with an alternative otherwise
  def check_username_availability(username, email)
    if User.username_available?(username, email, allow_reserved_username: @allow_reserved_username)
      { available: true, is_developer: is_developer?(email) }
    else
      { available: false, suggestion: UserNameSuggester.suggest(username) }
    end
  end

  # True when the email is listed in Rails.configuration.developer_emails.
  def is_developer?(value)
    Rails.configuration.respond_to?(:developer_emails) &&
      Rails.configuration.developer_emails.include?(value)
  end

  def self.is_developer?(email)
    new.is_developer?(email)
  end
end
``` | # frozen_string_literal: true
# Specs for UsernameCheckerService: format validation, reserved-username
# handling, and availability (User/UserNameSuggester are stubbed via mocha).
RSpec.describe UsernameCheckerService do
  describe "#check_username" do
    before do
      @service = UsernameCheckerService.new
      @nil_email = nil
      @email = "[email protected]"
    end

    context "when username is invalid" do
      it "rejects too short usernames" do
        result = @service.check_username("a", @nil_email)
        expect(result).to have_key(:errors)
      end

      it "rejects too long usernames" do
        result = @service.check_username("a123456789b123456789c123456789", @nil_email)
        expect(result).to have_key(:errors)
      end

      it "rejects usernames with invalid characters" do
        result = @service.check_username("vincent-", @nil_email)
        expect(result).to have_key(:errors)
      end

      it "rejects usernames that do not start with an alphanumeric character" do
        result = @service.check_username(".vincent", @nil_email)
        expect(result).to have_key(:errors)
      end

      context "with reserved usernames" do
        before { SiteSetting.reserved_usernames = "test|donkey" }

        it "rejects usernames that are reserved" do
          result = @service.check_username("test", @nil_email)
          expect(result[:available]).to eq(false)
        end

        it "allows reserved username checker to be skipped" do
          @service = UsernameCheckerService.new(allow_reserved_username: true)
          result = @service.check_username("test", @nil_email)
          expect(result[:available]).to eq(true)
        end
      end
    end

    it "username not available locally" do
      User.stubs(:username_available?).returns(false)
      UserNameSuggester.stubs(:suggest).returns("einar-j")
      result = @service.check_username("vincent", @nil_email)
      expect(result[:available]).to eq(false)
      expect(result[:suggestion]).to eq("einar-j")
    end

    it "username available locally" do
      User.stubs(:username_available?).returns(true)
      result = @service.check_username("vincent", @nil_email)
      expect(result[:available]).to eq(true)
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class HashtagAutocompleteService
HASHTAGS_PER_REQUEST = 20
SEARCH_MAX_LIMIT = 50
DEFAULT_DATA_SOURCES = [CategoryHashtagDataSource, TagHashtagDataSource]
DEFAULT_CONTEXTUAL_TYPE_PRIORITIES = [
{ type: "category", context: "topic-composer", priority: 100 },
{ type: "tag", context: "topic-composer", priority: 50 },
]
# Memoized enum of the slug-matching modes a data source search can use.
def self.search_conditions
  @search_conditions ||= Enum.new(contains: 0, starts_with: 1)
end
attr_reader :guardian
# NOTE: This is not meant to be called directly; use `enabled_data_sources`
# or the individual data_source_X methods instead.
#
# @return [Set] core data sources plus any registered by plugins
def self.data_sources
  # Category and Tag data sources are in core and always should be
  # included for searches and lookups.
  Set.new(DEFAULT_DATA_SOURCES | DiscoursePluginRegistry.hashtag_autocomplete_data_sources)
end
# @return [Set<Hash>] { type:, context:, priority: } entries — the core
#   topic-composer defaults plus any registered by plugins
def self.contextual_type_priorities
  # Category and Tag type priorities for the composer are default and
  # always are included.
  Set.new(
    DEFAULT_CONTEXTUAL_TYPE_PRIORITIES |
      DiscoursePluginRegistry.hashtag_autocomplete_contextual_type_priorities,
  )
end
# Only the data sources whose feature is currently enabled.
def self.enabled_data_sources
  self.data_sources.select(&:enabled?)
end
# @return [Array] the type names of all enabled data sources
def self.data_source_types
  self.enabled_data_sources.map(&:type)
end
# Maps each enabled data source type to the icon it renders with.
def self.data_source_icon_map
  self.enabled_data_sources.to_h { |source| [source.type, source.icon] }
end
# Finds the enabled data source registered for +type+, or nil.
def self.data_source_from_type(type)
  self.enabled_data_sources.detect { |source| source.type == type }
end
# Priority entries registered for the given context (e.g. "topic-composer").
def self.find_priorities_for_context(context)
  contextual_type_priorities.select { |ctp| ctp[:context] == context }
end
# All distinct context names that have registered type priorities.
def self.unique_contexts
  contextual_type_priorities.map { |ctp| ctp[:context] }.uniq
end
# Types for a context, highest priority first, limited to types that have
# an enabled data source.
def self.ordered_types_for_context(context)
  find_priorities_for_context(context)
    .sort_by { |priority_entry| -priority_entry[:priority] }
    .map { |priority_entry| priority_entry[:type] }
    .select { |type| data_source_types.include?(type) }
end
# Maps every registered context to its type list in priority order.
# Uses Enumerable#to_h with a block instead of the legacy Hash[] form.
def self.contexts_with_ordered_types
  unique_contexts.to_h { |context| [context, ordered_types_for_context(context)] }
end
# Plain value object describing a single autocomplete result row.
class HashtagItem
  # text           - primary label shown in the UI autocomplete menu
  # secondary_text - optional extra label (e.g. tag topic counts); note it
  #                  is not settable through the constructor
  # description    - hover text, e.g. a category description
  # slug           - canonical slug; the ref may add a ::type suffix to
  #                  disambiguate conflicts
  # icon           - icon shown next to the menu entry
  # type           - resource type, e.g. "tag" or "category"
  # ref            - unique text inserted into the textbox on selection;
  #                  used for lookups via #lookup
  # relative_url   - relative URL of the underlying resource (e.g. /c/2/staff)
  # id             - id of the underlying resource (category.id, tag.id)
  attr_accessor :text,
                :secondary_text,
                :description,
                :slug,
                :icon,
                :type,
                :ref,
                :relative_url,
                :id

  def initialize(params = {})
    %i[relative_url text description icon type ref slug id].each do |key|
      public_send("#{key}=", params[key])
    end
  end

  def to_h
    {
      relative_url: relative_url,
      text: text,
      description: description,
      icon: icon,
      type: type,
      ref: ref,
      slug: slug,
      id: id,
    }
  end
end
# @param guardian [Guardian] used for permission checks during lookup/search
def initialize(guardian)
  @guardian = guardian
end
##
# Finds resources of the provided types by their exact slugs, unlike
# search which can search partial names, slugs, etc. Used for cooking
# fully formed #hashtags in the markdown pipeline. The @guardian handles
# permissions around which results should be returned here.
#
# @param {Array} slugs The fully formed slugs to look up, which can have
#   ::type suffixes attached as well (e.g. ::category),
#   and in the case of categories can have parent:child
#   relationships.
# @param {Array} types_in_priority_order The resource types we are looking up
#   and the priority order in which we should
#   match them if they do not have type suffixes.
# @returns {Hash} A hash with the types as keys and an array of HashtagItem that
#   matches the provided slugs.
def lookup(slugs, types_in_priority_order)
  raise Discourse::InvalidParameters.new(:slugs) if !slugs.is_a?(Array)
  raise Discourse::InvalidParameters.new(:order) if !types_in_priority_order.is_a?(Array)

  # Drop any requested types that have no enabled data source.
  types_in_priority_order =
    types_in_priority_order.select do |type|
      HashtagAutocompleteService.data_source_types.include?(type)
    end
  lookup_results = Hash[types_in_priority_order.collect { |type| [type.to_sym, []] }]
  limited_slugs = slugs[0..HashtagAutocompleteService::HASHTAGS_PER_REQUEST]

  slugs_without_suffixes =
    limited_slugs.reject do |slug|
      HashtagAutocompleteService.data_source_types.any? { |type| slug.ends_with?("::#{type}") }
    end
  slugs_with_suffixes = (limited_slugs - slugs_without_suffixes)

  # For all the slugs without a type suffix, we need to lookup in order, falling
  # back to the next type if no results are returned for a slug for the current
  # type. This way slugs without suffix make sense in context, e.g. in the topic
  # composer we want a slug without a suffix to be a category first, tag second.
  if slugs_without_suffixes.any?
    types_in_priority_order.each do |type|
      # We do not want to continue fallback if there are conflicting slugs where
      # one has a type and one does not, this may result in duplication. An
      # example:
      #
      # A category with slug `management` is not found because of permissions
      # and we also have a slug with suffix in the form of `management::tag`.
      # There is a tag that exists with the `management` slug. The tag should
      # not be found here but rather in the next lookup since it's got a more
      # specific lookup with the type.
      slugs_to_lookup =
        slugs_without_suffixes.reject { |slug| slugs_with_suffixes.include?("#{slug}::#{type}") }
      found_from_slugs = execute_lookup!(lookup_results, type, guardian, slugs_to_lookup)

      slugs_without_suffixes = slugs_without_suffixes - found_from_slugs.map(&:ref)
      break if slugs_without_suffixes.empty?
    end
  end

  # We then look up the remaining slugs based on their type suffix, stripping out
  # the type suffix first since it will not match the actual slug.
  if slugs_with_suffixes.any?
    types_in_priority_order.each do |type|
      slugs_for_type =
        slugs_with_suffixes
          .select { |slug| slug.ends_with?("::#{type}") }
          .map { |slug| slug.gsub("::#{type}", "") }
      next if slugs_for_type.empty?
      execute_lookup!(lookup_results, type, guardian, slugs_for_type)

      # Make sure the refs are the same going out as they were going in.
      lookup_results[type.to_sym].each do |item|
        item.ref = "#{item.ref}::#{type}" if slugs_with_suffixes.include?("#{item.ref}::#{type}")
      end
    end
  end

  lookup_results
end
##
# Searches registered hashtag data sources using the provided term (data
# sources determine what is actually searched) and prioritises the results
# based on types_in_priority_order and the limit. For example, if 5 categories
# were returned for the term and the limit was 5, we would not even bother
# searching tags. The @guardian handles permissions around which results should
# be returned here.
#
# Items which have a slug that exactly matches the search term via lookup will be found
# first and floated to the top of the results, and still be ordered by type.
#
# @param {String} term Search term, from the UI generally where the user is typing #has...
# @param {Array} types_in_priority_order The resource types we are searching for
#   and the priority order in which we should
#   return them.
# @param {Integer} limit The maximum number of search results to return, we don't
#   bother searching subsequent types if the first types in
#   the array already reach the limit.
# @returns {Array} The results as HashtagItems
def search(
  term,
  types_in_priority_order,
  limit: SiteSetting.experimental_hashtag_search_result_limit
)
  raise Discourse::InvalidParameters.new(:order) if !types_in_priority_order.is_a?(Array)
  limit = [limit, SEARCH_MAX_LIMIT].min

  types_in_priority_order =
    types_in_priority_order.select do |type|
      HashtagAutocompleteService.data_source_types.include?(type)
    end

  return search_without_term(types_in_priority_order, limit) if term.blank?

  limited_results = []
  top_ranked_type = nil
  term = term.downcase

  # Float exact matches by slug to the top of the list, any of these will be excluded
  # from further results.
  types_in_priority_order.each do |type|
    search_results = execute_lookup!(nil, type, guardian, [term])
    limited_results.concat(search_results) if search_results
    break if limited_results.length >= limit
  end

  # Next priority are slugs which start with the search term.
  if limited_results.length < limit
    types_in_priority_order.each do |type|
      limited_results =
        search_using_condition(
          limited_results,
          term,
          type,
          limit,
          HashtagAutocompleteService.search_conditions[:starts_with],
        )
      top_ranked_type = type if top_ranked_type.nil?
      break if limited_results.length >= limit
    end
  end

  # Search the data source for each type, validate and sort results,
  # and break off from searching more data sources if we reach our limit
  if limited_results.length < limit
    types_in_priority_order.each do |type|
      limited_results =
        search_using_condition(
          limited_results,
          term,
          type,
          limit,
          HashtagAutocompleteService.search_conditions[:contains],
        )
      top_ranked_type = type if top_ranked_type.nil?
      break if limited_results.length >= limit
    end
  end

  # Any items that are _not_ the top-ranked type (which could possibly not be
  # the same as the first item in the types_in_priority_order if there was
  # no data for that type) that have conflicting slugs with other items for
  # other higher-ranked types need to have a ::type suffix added to their ref.
  #
  # This will be used for the lookup method above if one of these items is
  # chosen in the UI, otherwise there is no way to determine whether a hashtag is
  # for a category or a tag etc.
  #
  # For example, if there is a category with the slug #general and a tag
  # with the slug #general, then the tag will have its ref changed to #general::tag
  append_types_to_conflicts(limited_results, top_ranked_type, types_in_priority_order, limit)
end
private
# Searches one data source type with the given match condition and appends
# de-duplicated, source-sorted results, never exceeding +limit+ in total.
def search_using_condition(limited_results, term, type, limit, condition)
  search_results =
    search_for_type(type, guardian, term, limit - limited_results.length, condition)
  return limited_results if search_results.empty?

  search_results =
    HashtagAutocompleteService.data_source_from_type(type).search_sort(
      search_results.reject do |item|
        # Drop anything already found by the earlier exact-slug lookup.
        limited_results.any? { |exact| exact.type == type && exact.slug === item.slug }
      end,
      term,
    )

  limited_results.concat(search_results)
end
# Returns up to +limit+ items for an empty search term: each type gets an
# even share of the limit and supplies its own default ordering.
def search_without_term(types_in_priority_order, limit)
  split_limit = (limit.to_f / types_in_priority_order.length.to_f).ceil
  limited_results = []

  types_in_priority_order.each do |type|
    search_results =
      filter_valid_data_items(
        HashtagAutocompleteService.data_source_from_type(type).search_without_term(
          guardian,
          split_limit,
        ),
      )
    next if search_results.empty?

    # This is purposefully unsorted as search_without_term should sort
    # in its own way.
    limited_results.concat(set_types(set_refs(search_results), type))
  end

  limited_results.take(limit)
end
# Sometimes a specific ref is required, e.g. for categories that have
# a parent their ref will be parent_slug:child_slug, though most of the
# time it will be the same as the slug. The ref can then be used for
# lookup in the UI.
#
# Only fills in refs the data source left blank (||=); mutates the items.
def set_refs(hashtag_items)
  hashtag_items.each { |item| item.ref ||= item.slug }
end
# Stamps every item with the data source type it came from; mutates the items.
def set_types(hashtag_items, type)
  hashtag_items.each { |item| item.type = type }
end
# Guards against misbehaving data sources: keeps only HashtagItem instances
# that have both a slug and display text.
def filter_valid_data_items(items)
  items.select { |item| item.kind_of?(HashtagItem) && item.slug.present? && item.text.present? }
end
# Delegates to the type's data source search, then normalizes the results:
# refs and types are filled in and invalid items are discarded.
def search_for_type(
  type,
  guardian,
  term,
  limit,
  condition = HashtagAutocompleteService.search_conditions[:contains]
)
  filter_valid_data_items(
    set_types(
      set_refs(
        HashtagAutocompleteService.data_source_from_type(type).search(
          guardian,
          term,
          limit,
          condition,
        ),
      ),
      type,
    ),
  )
end
# Runs a slug lookup for one type, sorts the valid results by display text,
# and merges them into +lookup_results+ when that accumulator is provided.
#
# @return [Array] the items found for these slugs
def execute_lookup!(lookup_results, type, guardian, slugs)
  found_from_slugs = filter_valid_data_items(lookup_for_type(type, guardian, slugs))
  found_from_slugs.sort_by! { |item| item.text.downcase }

  if lookup_results.present?
    lookup_results[type.to_sym] = lookup_results[type.to_sym].concat(found_from_slugs)
  end

  found_from_slugs
end
# Raw slug lookup against one data source, with refs and types normalized.
def lookup_for_type(type, guardian, slugs)
  set_types(
    set_refs(HashtagAutocompleteService.data_source_from_type(type).lookup(guardian, slugs)),
    type,
  )
end
def append_types_to_conflicts(limited_results, top_ranked_type, types_in_priority_order, limit)
limited_results.each do |hashtag_item|
next if hashtag_item.type == top_ranked_type
# We only need to change the ref to include the type if there is a
# higher-ranked hashtag slug that conflicts with this one.
higher_ranked_types =
types_in_priority_order.slice(0, types_in_priority_order.index(hashtag_item.type))
higher_ranked_slugs =
limited_results
.reject { |r| r.type === hashtag_item.type }
.select { |r| higher_ranked_types.include?(r.type) }
.map(&:slug)
if higher_ranked_slugs.include?(hashtag_item.slug)
hashtag_item.ref = "#{hashtag_item.ref}::#{hashtag_item.type}"
end
end
limited_results.take(limit)
end
end
``` | # frozen_string_literal: true
RSpec.describe HashtagAutocompleteService do
subject(:service) { described_class.new(guardian) }
fab!(:user)
fab!(:category1) { Fabricate(:category, name: "The Book Club", slug: "the-book-club") }
fab!(:tag1) do
Fabricate(:tag, name: "great-books", staff_topic_count: 22, public_topic_count: 22)
end
fab!(:topic1) { Fabricate(:topic) }
let(:guardian) { Guardian.new(user) }
after { DiscoursePluginRegistry.reset! }
describe ".enabled_data_sources" do
it "only returns data sources that are enabled" do
expect(HashtagAutocompleteService.enabled_data_sources).to eq(
HashtagAutocompleteService::DEFAULT_DATA_SOURCES,
)
end
end
describe ".contexts_with_ordered_types" do
it "returns a hash of all the registered search contexts and their types in the defined priority order" do
expect(HashtagAutocompleteService.contexts_with_ordered_types).to eq(
{ "topic-composer" => %w[category tag] },
)
DiscoursePluginRegistry.register_hashtag_autocomplete_contextual_type_priority(
{ type: "category", context: "awesome-composer", priority: 50 },
stub(enabled?: true),
)
DiscoursePluginRegistry.register_hashtag_autocomplete_contextual_type_priority(
{ type: "tag", context: "awesome-composer", priority: 100 },
stub(enabled?: true),
)
expect(HashtagAutocompleteService.contexts_with_ordered_types).to eq(
{ "topic-composer" => %w[category tag], "awesome-composer" => %w[tag category] },
)
end
it "does not return types which have been disabled" do
SiteSetting.tagging_enabled = false
expect(HashtagAutocompleteService.contexts_with_ordered_types).to eq(
{ "topic-composer" => %w[category] },
)
end
end
describe ".data_source_icon_map" do
it "gets an array for all icons defined by data sources so they can be used for markdown allowlisting" do
expect(HashtagAutocompleteService.data_source_icon_map).to eq(
{ "category" => "folder", "tag" => "tag" },
)
end
end
describe "#search" do
it "returns search results for tags and categories by default" do
expect(service.search("book", %w[category tag]).map(&:text)).to eq(
["The Book Club", "great-books"],
)
end
it "respects the types_in_priority_order param" do
expect(service.search("book", %w[tag category]).map(&:text)).to eq(
["great-books", "The Book Club"],
)
end
it "respects the limit param" do
expect(service.search("book", %w[tag category], limit: 1).map(&:text)).to eq(["great-books"])
end
it "does not allow more than SEARCH_MAX_LIMIT results to be specified by the limit param" do
stub_const(HashtagAutocompleteService, "SEARCH_MAX_LIMIT", 1) do
expect(service.search("book", %w[category tag], limit: 1000).map(&:text)).to eq(
["The Book Club"],
)
end
end
it "does not search other data sources if the limit is reached by earlier type data sources" do
# only expected once to try get the exact matches first
DiscourseTagging.expects(:filter_allowed_tags).never
service.search("the-book", %w[category tag], limit: 1)
end
it "includes the tag count" do
tag1.update!(staff_topic_count: 78, public_topic_count: 78)
expect(service.search("book", %w[tag category]).map(&:text)).to eq(
["great-books", "The Book Club"],
)
end
it "does case-insensitive search" do
expect(service.search("bOOk", %w[category tag]).map(&:text)).to eq(
["The Book Club", "great-books"],
)
end
it "can search categories by name or slug" do
expect(service.search("the-book-club", %w[category]).map(&:text)).to eq(["The Book Club"])
expect(service.search("Book C", %w[category]).map(&:text)).to eq(["The Book Club"])
end
it "does not include categories the user cannot access" do
category1.update!(read_restricted: true)
expect(service.search("book", %w[tag category]).map(&:text)).to eq(["great-books"])
end
it "does not include tags the user cannot access" do
Fabricate(:tag_group, permissions: { "staff" => 1 }, tag_names: ["great-books"])
expect(service.search("book", %w[tag]).map(&:text)).to be_empty
end
it "includes other data sources" do
Fabricate(:bookmark, user: user, name: "read review of this fantasy book")
Fabricate(:bookmark, user: user, name: "cool rock song")
guardian.user.reload
DiscoursePluginRegistry.register_hashtag_autocomplete_data_source(
FakeBookmarkHashtagDataSource,
stub(enabled?: true),
)
expect(service.search("book", %w[category tag bookmark]).map(&:text)).to eq(
["The Book Club", "great-books", "read review of this fantasy book"],
)
end
it "handles refs for categories that have a parent" do
parent = Fabricate(:category, name: "Hobbies", slug: "hobbies")
category1.update!(parent_category: parent)
expect(service.search("book", %w[category tag]).map(&:ref)).to eq(
%w[hobbies:the-book-club great-books],
)
category1.update!(parent_category: nil)
end
it "appends type suffixes for the ref on conflicting slugs on items that are not the top priority type" do
Fabricate(:tag, name: "the-book-club")
expect(service.search("book", %w[category tag]).map(&:ref)).to eq(
%w[the-book-club great-books the-book-club::tag],
)
Fabricate(:bookmark, user: user, name: "book club")
guardian.user.reload
DiscoursePluginRegistry.register_hashtag_autocomplete_data_source(
FakeBookmarkHashtagDataSource,
stub(enabled?: true),
)
expect(service.search("book", %w[category tag bookmark]).map(&:ref)).to eq(
%w[book-club the-book-club great-books the-book-club::tag],
)
end
it "does not add a type suffix where
1. a subcategory name conflicts with an existing tag name and
2. the category is not the top ranked type" do
parent = Fabricate(:category, name: "Hobbies", slug: "hobbies")
category1.update!(parent_category: parent)
Fabricate(:tag, name: "the-book-club")
Fabricate(:bookmark, user: user, name: "book club")
guardian.user.reload
DiscoursePluginRegistry.register_hashtag_autocomplete_data_source(
FakeBookmarkHashtagDataSource,
stub(enabled?: true),
)
expect(service.search("book", %w[bookmark category tag]).map(&:ref)).to eq(
%w[book-club hobbies:the-book-club great-books the-book-club::tag],
)
end
it "handles the type suffix where the top ranked type conflicts with a subcategory" do
parent = Fabricate(:category, name: "Hobbies", slug: "hobbies")
category1.update!(parent_category: parent)
Fabricate(:tag, name: "the-book-club")
Fabricate(:bookmark, user: user, name: "the book club")
guardian.user.reload
DiscoursePluginRegistry.register_hashtag_autocomplete_data_source(
FakeBookmarkHashtagDataSource,
stub(enabled?: true),
)
expect(service.search("book", %w[bookmark category tag]).map(&:ref)).to eq(
%w[the-book-club hobbies:the-book-club::category great-books the-book-club::tag],
)
end
it "orders results by (with type ordering within each section):
1. exact match on slug (ignoring parent/child distinction for categories)
2. slugs that start with the term
3. then name for everything else" do
category2 = Fabricate(:category, name: "Book Library", slug: "book-library")
Fabricate(:category, name: "Horror", slug: "book", parent_category: category2)
Fabricate(:category, name: "Romance", slug: "romance-books")
Fabricate(:category, name: "Abstract Philosophy", slug: "abstract-philosophy-books")
category6 = Fabricate(:category, name: "Book Reviews", slug: "book-reviews")
Fabricate(:category, name: "Good Books", slug: "book", parent_category: category6)
Fabricate(:tag, name: "bookmania", staff_topic_count: 15, public_topic_count: 15)
Fabricate(:tag, name: "awful-books", staff_topic_count: 56, public_topic_count: 56)
expect(service.search("book", %w[category tag]).map(&:ref)).to eq(
[
"book-reviews:book", # category exact match on slug, name sorted
"book-library:book",
"book-library", # category starts with match on slug, name sorted
"book-reviews",
"bookmania", # tag starts with match on slug, name sorted
"abstract-philosophy-books", # category partial match on slug, name sorted
"romance-books",
"the-book-club",
"awful-books", # tag partial match on slug, name sorted
"great-books",
],
)
expect(service.search("book", %w[category tag]).map(&:text)).to eq(
[
"Good Books",
"Horror",
"Book Library",
"Book Reviews",
"bookmania",
"Abstract Philosophy",
"Romance",
"The Book Club",
"awful-books",
"great-books",
],
)
end
context "when multiple tags and categories are returned" do
fab!(:category2) { Fabricate(:category, name: "Book Zone", slug: "book-zone") }
fab!(:category3) { Fabricate(:category, name: "Book Dome", slug: "book-dome") }
fab!(:category4) { Fabricate(:category, name: "Bookworld", slug: "book") }
fab!(:tag2) { Fabricate(:tag, name: "mid-books") }
fab!(:tag3) { Fabricate(:tag, name: "terrible-books") }
fab!(:tag4) { Fabricate(:tag, name: "book") }
it "orders them by name within their type order and prioritizes exact matches to the top of the list" do
expect(service.search("book", %w[category tag], limit: 10).map(&:ref)).to eq(
%w[book book::tag book-dome book-zone the-book-club great-books mid-books terrible-books],
)
end
end
context "when not tagging_enabled" do
before { SiteSetting.tagging_enabled = false }
it "does not return any tags" do
expect(service.search("book", %w[category tag]).map(&:text)).to eq(["The Book Club"])
end
end
context "when no term is provided (default results) triggered by a # with no characters in the UI" do
fab!(:category2) do
Fabricate(:category, name: "Book Zone", slug: "book-zone", topic_count: 546)
end
fab!(:category3) do
Fabricate(:category, name: "Book Dome", slug: "book-dome", topic_count: 987)
end
fab!(:category4) { Fabricate(:category, name: "Bookworld", slug: "book", topic_count: 56) }
fab!(:category5) { Fabricate(:category, name: "Media", slug: "media", topic_count: 446) }
fab!(:tag2) do
Fabricate(:tag, name: "mid-books", staff_topic_count: 33, public_topic_count: 33)
end
fab!(:tag3) do
Fabricate(:tag, name: "terrible-books", staff_topic_count: 2, public_topic_count: 2)
end
fab!(:tag4) { Fabricate(:tag, name: "book", staff_topic_count: 1, public_topic_count: 1) }
it "returns the 'most popular' categories and tags (based on topic_count) that the user can access" do
category1.update!(read_restricted: true)
Fabricate(:tag_group, permissions: { "staff" => 1 }, tag_names: ["terrible-books"])
expect(service.search(nil, %w[category tag]).map(&:text)).to eq(
[
"Book Dome",
"Book Zone",
"Media",
"Bookworld",
Category.find(SiteSetting.uncategorized_category_id).name,
"mid-books",
"great-books",
"book",
],
)
end
it "does not error if a type provided for priority order has been disabled" do
SiteSetting.tagging_enabled = false
expect(service.search(nil, %w[category tag]).map(&:ref)).to eq(
%w[book-dome book-zone media book uncategorized the-book-club],
)
end
end
end
describe "#lookup" do
fab!(:tag2) { Fabricate(:tag, name: "fiction-books") }
it "returns category and tag in a hash format with the slug and url" do
result = service.lookup(%w[the-book-club great-books fiction-books], %w[category tag])
expect(result[:category].map(&:slug)).to eq(["the-book-club"])
expect(result[:category].map(&:relative_url)).to eq(["/c/the-book-club/#{category1.id}"])
expect(result[:tag].map(&:slug)).to eq(%w[fiction-books great-books])
expect(result[:tag].map(&:relative_url)).to eq(%w[/tag/fiction-books /tag/great-books])
end
it "does not include category the user cannot access" do
category1.update!(read_restricted: true)
result = service.lookup(%w[the-book-club great-books fiction-books], %w[category tag])
expect(result[:category]).to eq([])
end
it "does not include tag the user cannot access" do
Fabricate(:tag_group, permissions: { "staff" => 1 }, tag_names: ["great-books"])
result = service.lookup(%w[the-book-club great-books fiction-books], %w[category tag])
expect(result[:tag].map(&:slug)).to eq(%w[fiction-books])
expect(result[:tag].map(&:relative_url)).to eq(["/tag/fiction-books"])
end
it "handles type suffixes for slugs" do
result =
service.lookup(%w[the-book-club::category great-books::tag fiction-books], %w[category tag])
expect(result[:category].map(&:slug)).to eq(["the-book-club"])
expect(result[:category].map(&:relative_url)).to eq(["/c/the-book-club/#{category1.id}"])
expect(result[:tag].map(&:slug)).to eq(%w[fiction-books great-books])
expect(result[:tag].map(&:relative_url)).to eq(%w[/tag/fiction-books /tag/great-books])
end
it "handles parent:child category lookups" do
parent_category = Fabricate(:category, name: "Media", slug: "media")
category1.update!(parent_category: parent_category)
result = service.lookup(%w[media:the-book-club], %w[category tag])
expect(result[:category].map(&:slug)).to eq(["the-book-club"])
expect(result[:category].map(&:ref)).to eq(["media:the-book-club"])
expect(result[:category].map(&:relative_url)).to eq(
["/c/media/the-book-club/#{category1.id}"],
)
category1.update!(parent_category: nil)
end
it "handles parent:child category lookups with type suffix" do
parent_category = Fabricate(:category, name: "Media", slug: "media")
category1.update!(parent_category: parent_category)
result = service.lookup(%w[media:the-book-club::category], %w[category tag])
expect(result[:category].map(&:slug)).to eq(["the-book-club"])
expect(result[:category].map(&:ref)).to eq(["media:the-book-club::category"])
expect(result[:category].map(&:relative_url)).to eq(
["/c/media/the-book-club/#{category1.id}"],
)
category1.update!(parent_category: nil)
end
it "does not return the category if the parent does not match the child" do
parent_category = Fabricate(:category, name: "Media", slug: "media")
category1.update!(parent_category: parent_category)
result = service.lookup(%w[bad-parent:the-book-club], %w[category tag])
expect(result[:category]).to be_empty
end
it "for slugs without a type suffix it falls back in type order until a result is found or types are exhausted" do
result = service.lookup(%w[the-book-club great-books fiction-books], %w[category tag])
expect(result[:category].map(&:slug)).to eq(["the-book-club"])
expect(result[:category].map(&:relative_url)).to eq(["/c/the-book-club/#{category1.id}"])
expect(result[:tag].map(&:slug)).to eq(%w[fiction-books great-books])
expect(result[:tag].map(&:relative_url)).to eq(%w[/tag/fiction-books /tag/great-books])
category2 = Fabricate(:category, name: "Great Books", slug: "great-books")
result = service.lookup(%w[the-book-club great-books fiction-books], %w[category tag])
expect(result[:category].map(&:slug)).to eq(%w[great-books the-book-club])
expect(result[:category].map(&:relative_url)).to eq(
["/c/great-books/#{category2.id}", "/c/the-book-club/#{category1.id}"],
)
expect(result[:tag].map(&:slug)).to eq(%w[fiction-books])
expect(result[:tag].map(&:relative_url)).to eq(%w[/tag/fiction-books])
category1.destroy!
Fabricate(:tag, name: "the-book-club")
result = service.lookup(%w[the-book-club great-books fiction-books], %w[category tag])
expect(result[:category].map(&:slug)).to eq(["great-books"])
expect(result[:category].map(&:relative_url)).to eq(["/c/great-books/#{category2.id}"])
expect(result[:tag].map(&:slug)).to eq(%w[fiction-books the-book-club])
expect(result[:tag].map(&:relative_url)).to eq(%w[/tag/fiction-books /tag/the-book-club])
result = service.lookup(%w[the-book-club great-books fiction-books], %w[tag category])
expect(result[:category]).to eq([])
expect(result[:tag].map(&:slug)).to eq(%w[fiction-books great-books the-book-club])
expect(result[:tag].map(&:relative_url)).to eq(
%w[/tag/fiction-books /tag/great-books /tag/the-book-club],
)
end
it "includes other data sources" do
Fabricate(:bookmark, user: user, name: "read review of this fantasy book")
Fabricate(:bookmark, user: user, name: "coolrock")
guardian.user.reload
DiscoursePluginRegistry.register_hashtag_autocomplete_data_source(
FakeBookmarkHashtagDataSource,
stub(enabled?: true),
)
result = service.lookup(["coolrock"], %w[category tag bookmark])
expect(result[:bookmark].map(&:slug)).to eq(["coolrock"])
end
it "handles type suffix lookups where there is another type with a conflicting slug that the user cannot access" do
category1.update!(read_restricted: true)
Fabricate(:tag, name: "the-book-club")
result = service.lookup(%w[the-book-club::tag the-book-club], %w[category tag])
expect(result[:category].map(&:ref)).to eq([])
expect(result[:tag].map(&:ref)).to eq(["the-book-club::tag"])
end
context "when not tagging_enabled" do
before { SiteSetting.tagging_enabled = false }
it "does not return tag" do
result = service.lookup(%w[the-book-club great-books fiction-books], %w[category tag])
expect(result[:category].map(&:slug)).to eq(["the-book-club"])
expect(result[:category].map(&:relative_url)).to eq(["/c/the-book-club/#{category1.id}"])
expect(result[:tag]).to eq(nil)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class HeatSettingsUpdater
def self.update
return unless SiteSetting.automatic_topic_heat_values
views_by_percentile = views_thresholds
update_setting(:topic_views_heat_high, views_by_percentile[10])
update_setting(:topic_views_heat_medium, views_by_percentile[25])
update_setting(:topic_views_heat_low, views_by_percentile[45])
like_ratios_by_percentile = like_ratio_thresholds
update_setting(:topic_post_like_heat_high, like_ratios_by_percentile[10])
update_setting(:topic_post_like_heat_medium, like_ratios_by_percentile[25])
update_setting(:topic_post_like_heat_low, like_ratios_by_percentile[45])
end
def self.views_thresholds
results = DB.query(<<~SQL)
SELECT ranked.bucket * 5 as percentile, MIN(ranked.views) as views
FROM (
SELECT NTILE(20) OVER (ORDER BY t.views DESC) AS bucket, t.views
FROM (
SELECT views
FROM topics
WHERE deleted_at IS NULL
AND archetype <> 'private_message'
AND visible = TRUE
) t
) ranked
WHERE bucket <= 9
GROUP BY bucket
SQL
results.inject({}) do |h, row|
h[row.percentile] = row.views
h
end
end
def self.like_ratio_thresholds
results = DB.query(<<~SQL)
SELECT ranked.bucket * 5 as percentile, MIN(ranked.ratio) as like_ratio
FROM (
SELECT NTILE(20) OVER (ORDER BY t.ratio DESC) AS bucket, t.ratio
FROM (
SELECT like_count::decimal / posts_count AS ratio
FROM topics
WHERE deleted_at IS NULL
AND archetype <> 'private_message'
AND visible = TRUE
AND posts_count >= 10
AND like_count > 0
ORDER BY created_at DESC
LIMIT 1000
) t
) ranked
WHERE bucket <= 9
GROUP BY bucket
SQL
results.inject({}) do |h, row|
h[row.percentile] = row.like_ratio
h
end
end
def self.update_setting(name, new_value)
if new_value.nil? || new_value <= SiteSetting.defaults[name]
if SiteSetting.get(name) != SiteSetting.defaults[name]
SiteSetting.set_and_log(name, SiteSetting.defaults[name])
end
elsif SiteSetting.get(name) == 0 || (new_value.to_f / SiteSetting.get(name) - 1.0).abs >= 0.05
rounded_new_value =
if new_value.is_a?(Integer)
if new_value > 9
digits = new_value.digits.reverse
(digits[0] * 10 + digits[1]) * 10.pow(digits[2..-1].size)
else
new_value
end
else
new_value.round(2)
end
SiteSetting.set_and_log(name, rounded_new_value) if SiteSetting.get(name) != rounded_new_value
end
end
end
``` | # frozen_string_literal: true
RSpec.describe HeatSettingsUpdater do
describe ".update" do
subject(:update_settings) { described_class.update }
def expect_default_values
%i[topic_views_heat topic_post_like_heat].each do |prefix|
%i[low medium high].each do |level|
setting_name = "#{prefix}_#{level}"
expect(SiteSetting.get(setting_name)).to eq(SiteSetting.defaults[setting_name])
end
end
end
it "changes nothing on fresh install" do
expect { update_settings }.to_not change { UserHistory.count }
expect_default_values
end
context "with low activity" do
let!(:hottest_topic1) { Fabricate(:topic, views: 3000, posts_count: 10, like_count: 2) }
let!(:hottest_topic2) { Fabricate(:topic, views: 3000, posts_count: 10, like_count: 2) }
let!(:warm_topic1) { Fabricate(:topic, views: 1500, posts_count: 10, like_count: 1) }
let!(:warm_topic2) { Fabricate(:topic, views: 1500, posts_count: 10, like_count: 1) }
let!(:warm_topic3) { Fabricate(:topic, views: 1500, posts_count: 10, like_count: 1) }
let!(:lukewarm_topic1) { Fabricate(:topic, views: 800, posts_count: 10, like_count: 0) }
let!(:lukewarm_topic2) { Fabricate(:topic, views: 800, posts_count: 10, like_count: 0) }
let!(:lukewarm_topic3) { Fabricate(:topic, views: 800, posts_count: 10, like_count: 0) }
let!(:lukewarm_topic4) { Fabricate(:topic, views: 800, posts_count: 10, like_count: 0) }
let!(:cold_topic) { Fabricate(:topic, views: 100, posts_count: 10, like_count: 0) }
it "doesn't make settings lower than defaults" do
expect { update_settings }.to_not change { UserHistory.count }
expect_default_values
end
it "can set back down to minimum defaults" do
%i[low medium high].each do |level|
SiteSetting.set("topic_views_heat_#{level}", 20_000)
SiteSetting.set("topic_post_like_heat_#{level}", 5.0)
end
expect { update_settings }.to change { UserHistory.count }.by(6)
expect_default_values
end
end
context "with similar activity" do
let!(:hottest_topic1) { Fabricate(:topic, views: 3530, posts_count: 100, like_count: 201) }
let!(:hottest_topic2) { Fabricate(:topic, views: 3530, posts_count: 100, like_count: 201) }
let!(:warm_topic1) { Fabricate(:topic, views: 2020, posts_count: 100, like_count: 99) }
let!(:warm_topic2) { Fabricate(:topic, views: 2020, posts_count: 100, like_count: 99) }
let!(:warm_topic3) { Fabricate(:topic, views: 2020, posts_count: 100, like_count: 99) }
let!(:lukewarm_topic1) { Fabricate(:topic, views: 1010, posts_count: 100, like_count: 51) }
let!(:lukewarm_topic2) { Fabricate(:topic, views: 1010, posts_count: 100, like_count: 51) }
let!(:lukewarm_topic3) { Fabricate(:topic, views: 1010, posts_count: 100, like_count: 51) }
let!(:lukewarm_topic4) { Fabricate(:topic, views: 1010, posts_count: 100, like_count: 51) }
let!(:cold_topic) { Fabricate(:topic, views: 100, posts_count: 100, like_count: 1) }
it "doesn't make small changes" do
expect { update_settings }.to_not change { UserHistory.count }
expect_default_values
end
end
context "with increased activity" do
let!(:hottest_topic1) { Fabricate(:topic, views: 10_100, posts_count: 100, like_count: 230) }
let!(:hottest_topic2) { Fabricate(:topic, views: 10_012, posts_count: 100, like_count: 220) }
let!(:warm_topic1) { Fabricate(:topic, views: 4020, posts_count: 99, like_count: 126) }
let!(:warm_topic2) { Fabricate(:topic, views: 4010, posts_count: 99, like_count: 116) }
let!(:warm_topic3) { Fabricate(:topic, views: 4005, posts_count: 99, like_count: 106) }
let!(:lukewarm_topic1) { Fabricate(:topic, views: 2040, posts_count: 99, like_count: 84) }
let!(:lukewarm_topic2) { Fabricate(:topic, views: 2030, posts_count: 99, like_count: 74) }
let!(:lukewarm_topic3) { Fabricate(:topic, views: 2020, posts_count: 99, like_count: 64) }
let!(:lukewarm_topic4) { Fabricate(:topic, views: 2002, posts_count: 99, like_count: 54) }
let!(:cold_topic) { Fabricate(:topic, views: 100, posts_count: 100, like_count: 1) }
it "changes settings when difference is significant" do
expect { update_settings }.to change { UserHistory.count }.by(6)
expect(SiteSetting.topic_views_heat_high).to eq(10_000)
expect(SiteSetting.topic_views_heat_medium).to eq(4000)
expect(SiteSetting.topic_views_heat_low).to eq(2000)
expect(SiteSetting.topic_post_like_heat_high).to eq(2.2)
expect(SiteSetting.topic_post_like_heat_medium).to eq(1.07)
expect(SiteSetting.topic_post_like_heat_low).to eq(0.55)
end
it "doesn't change settings when automatic_topic_heat_values is false" do
SiteSetting.automatic_topic_heat_values = false
expect { update_settings }.to_not change { UserHistory.count }
expect_default_values
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class UsernameChanger
def initialize(user, new_username, actor = nil)
@user = user
@old_username = user.username
@new_username = new_username
@actor = actor
end
def self.change(user, new_username, actor = nil)
self.new(user, new_username, actor).change
end
def self.override(user, new_username)
if user.username_equals_to?(new_username)
# override anyway since case could've been changed:
UsernameChanger.change(user, new_username, user)
true
elsif user.username != UserNameSuggester.fix_username(new_username)
suggested_username = UserNameSuggester.suggest(new_username, current_username: user.username)
UsernameChanger.change(user, suggested_username, user)
true
else
false
end
end
def change(asynchronous: true, run_update_job: true)
return false if @user.username == @new_username
@user.username = @new_username
if @user.save
if @actor && @old_username != @new_username
StaffActionLogger.new(@actor).log_username_change(@user, @old_username, @new_username)
end
if run_update_job
UsernameChanger.update_username(
user_id: @user.id,
old_username: @old_username,
new_username: @new_username,
avatar_template: @user.avatar_template_url,
asynchronous: asynchronous,
)
end
return true
end
false
end
def self.update_username(
user_id:,
old_username:,
new_username:,
avatar_template:,
asynchronous: true
)
args = {
user_id: user_id,
old_username: old_username,
new_username: new_username,
avatar_template: avatar_template,
}
if asynchronous
Jobs.enqueue(:update_username, args)
else
Jobs::UpdateUsername.new.execute(args)
end
end
end
``` | # frozen_string_literal: true
RSpec.describe UsernameChanger do
before { Jobs.run_immediately! }
describe "#change" do
let(:user) { Fabricate(:user) }
context "when everything goes well" do
let!(:old_username) { user.username }
it "should change the username" do
new_username = "#{user.username}1234"
events =
DiscourseEvent
.track_events { @result = UsernameChanger.change(user, new_username) }
.last(2)
expect(@result).to eq(true)
event = events.first
expect(event[:event_name]).to eq(:username_changed)
expect(event[:params].first).to eq(old_username)
expect(event[:params].second).to eq(new_username)
event = events.last
expect(event[:event_name]).to eq(:user_updated)
expect(event[:params].first).to eq(user)
user.reload
expect(user.username).to eq(new_username)
expect(user.username_lower).to eq(new_username.downcase)
end
it "do nothing if the new username is the same" do
new_username = user.username
events =
DiscourseEvent.track_events { @result = UsernameChanger.change(user, new_username) }
expect(@result).to eq(false)
expect(events.count).to be_zero
end
end
context "when something goes wrong" do
let(:wrong_username) { "" }
let(:username_before_change) { user.username }
let(:username_lower_before_change) { user.username_lower }
it "should not change the username" do
@result = UsernameChanger.change(user, wrong_username)
expect(@result).to eq(false)
user.reload
expect(user.username).to eq(username_before_change)
expect(user.username_lower).to eq(username_lower_before_change)
end
end
context "when changing the case of my username" do
let!(:myself) { Fabricate(:user, username: "hansolo") }
it "should change the username" do
expect do
expect(UsernameChanger.change(myself, "HanSolo", myself)).to eq(true)
end.to change { UserHistory.count }.by(1)
expect(UserHistory.last.action).to eq(UserHistory.actions[:change_username])
expect(myself.reload.username).to eq("HanSolo")
expect do UsernameChanger.change(myself, "HanSolo", myself) end.not_to change {
UserHistory.count
} # make sure it does not log a dupe
expect do UsernameChanger.change(myself, user.username, myself) end.not_to change {
UserHistory.count
} # does not log if the username already exists
end
end
describe "allow custom minimum username length from site settings" do
before do
@custom_min = 2
SiteSetting.min_username_length = @custom_min
end
it "should allow a shorter username than default" do
result = UsernameChanger.change(user, "a" * @custom_min)
expect(result).not_to eq(false)
end
it "should not allow a shorter username than limit" do
result = UsernameChanger.change(user, "a" * (@custom_min - 1))
expect(result).to eq(false)
end
it "should not allow a longer username than limit" do
result = UsernameChanger.change(user, "a" * (User.username_length.end + 1))
expect(result).to eq(false)
end
end
context "when there are posts and revisions" do
let(:user) { Fabricate(:user, username: "foo") }
let(:topic) { Fabricate(:topic, user: user) }
before do
UserActionManager.enable
Discourse.expects(:warn_exception).never
end
def create_post_and_change_username(args = {}, &block)
stub_image_size
post = create_post(args.merge(topic_id: topic.id))
args
.delete(:revisions)
&.each { |revision| post.revise(post.user, revision, force_new_version: true) }
block.call(post) if block
UsernameChanger.change(user, args[:target_username] || "bar")
post.reload
end
context "when there are mentions" do
it "rewrites cooked correctly" do
post = create_post_and_change_username(raw: "Hello @foo")
expect(post.cooked).to eq(%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a></p>))
post.rebake!
expect(post.cooked).to eq(%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a></p>))
end
it "removes the username from the search index" do
SearchIndexer.enable
create_post_and_change_username(raw: "Hello @foo")
results = Search.execute("foo", min_search_term_length: 1)
expect(results.posts).to be_empty
end
it "ignores case when replacing mentions" do
post = create_post_and_change_username(raw: "There's no difference between @foo and @Foo")
expect(post.raw).to eq("There's no difference between @bar and @bar")
expect(post.cooked).to eq(
%Q(<p>There’s no difference between <a class="mention" href="/u/bar">@bar</a> and <a class="mention" href="/u/bar">@bar</a></p>),
)
end
it "replaces mentions when there are leading symbols" do
post = create_post_and_change_username(raw: ".@foo -@foo %@foo _@foo ,@foo ;@foo @@foo")
expect(post.raw).to eq(".@bar -@bar %@bar _@bar ,@bar ;@bar @@bar")
expect(post.cooked).to match_html <<~HTML
<p>.<a class="mention" href="/u/bar">@bar</a>
-<a class="mention" href="/u/bar">@bar</a>
%<a class="mention" href="/u/bar">@bar</a>
_<a class="mention" href="/u/bar">@bar</a>
,<a class="mention" href="/u/bar">@bar</a>
;<a class="mention" href="/u/bar">@bar</a>
@<a class="mention" href="/u/bar">@bar</a></p>
HTML
end
it "replaces mentions within double and single quotes" do
post = create_post_and_change_username(raw: %Q("@foo" '@foo'))
expect(post.raw).to eq(%Q("@bar" '@bar'))
expect(post.cooked).to eq(
%Q(<p>“<a class="mention" href="/u/bar">@bar</a>” ‘<a class="mention" href="/u/bar">@bar</a>’</p>),
)
end
it "replaces Markdown formatted mentions" do
post = create_post_and_change_username(raw: "**@foo** *@foo* _@foo_ ~~@foo~~")
expect(post.raw).to eq("**@bar** *@bar* _@bar_ ~~@bar~~")
expect(post.cooked).to match_html <<~HTML
<p><strong><a class="mention" href="/u/bar">@bar</a></strong>
<em><a class="mention" href="/u/bar">@bar</a></em>
<em><a class="mention" href="/u/bar">@bar</a></em>
<s><a class="mention" href="/u/bar">@bar</a></s></p>
HTML
end
it "replaces mentions when there are trailing symbols" do
post = create_post_and_change_username(raw: "@foo. @foo, @foo: @foo; @foo_ @foo-")
expect(post.raw).to eq("@bar. @bar, @bar: @bar; @bar_ @bar-")
expect(post.cooked).to match_html <<~HTML
<p><a class="mention" href="/u/bar">@bar</a>.
<a class="mention" href="/u/bar">@bar</a>,
<a class="mention" href="/u/bar">@bar</a>:
<a class="mention" href="/u/bar">@bar</a>;
<a class="mention" href="/u/bar">@bar</a>_
<a class="mention" href="/u/bar">@bar</a>-</p>
HTML
end
it "does not replace mention in cooked when mention contains a trailing underscore" do
# Older versions of Discourse detected a trailing underscore as part of a username.
# That doesn't happen anymore, so we need to do create the `cooked` for this test manually.
post =
create_post_and_change_username(raw: "@foobar @foo") do |p|
p.update_columns(
raw: p.raw.gsub("@foobar", "@foo_"),
cooked: p.cooked.gsub("@foobar", "@foo_"),
)
end
expect(post.raw).to eq("@bar_ @bar")
expect(post.cooked).to eq(
%Q(<p><span class="mention">@foo_</span> <a class="mention" href="/u/bar">@bar</a></p>),
)
end
it "does not replace mentions when there are leading alphanumeric chars" do
post = create_post_and_change_username(raw: "@foo a@foo 2@foo")
expect(post.raw).to eq("@bar a@foo 2@foo")
expect(post.cooked).to eq(
%Q(<p><a class="mention" href="/u/bar">@bar</a> a@foo 2@foo</p>),
)
end
it "does not replace username within email address" do
post = create_post_and_change_username(raw: "@foo [email protected]")
expect(post.raw).to eq("@bar [email protected]")
expect(post.cooked).to eq(
%Q(<p><a class="mention" href="/u/bar">@bar</a> <a href="mailto:[email protected]">[email protected]</a></p>),
)
end
it "does not replace username in a mention of a similar username" do
Fabricate(:user, username: "foobar")
Fabricate(:user, username: "foo-bar")
Fabricate(:user, username: "foo_bar")
Fabricate(:user, username: "foo1")
post = create_post_and_change_username(raw: "@foo @foobar @foo-bar @foo_bar @foo1")
expect(post.raw).to eq("@bar @foobar @foo-bar @foo_bar @foo1")
expect(post.cooked).to match_html <<~HTML
<p><a class="mention" href="/u/bar">@bar</a> <a class="mention" href="/u/foobar">@foobar</a> <a class="mention" href="/u/foo-bar">@foo-bar</a> <a class="mention" href="/u/foo_bar">@foo_bar</a> <a class="mention" href="/u/foo1">@foo1</a></p>
HTML
end
it "updates the path to the user even when it links to /user instead of /u" do
post = create_post_and_change_username(raw: "Hello @foo")
post.update_column(:cooked, post.cooked.gsub("/u/foo", "/users/foo"))
expect(post.raw).to eq("Hello @bar")
expect(post.cooked).to eq(%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a></p>))
end
it "replaces mentions within revisions" do
revisions = [
{ raw: "Hello Foo" },
{ title: "new topic title" },
{ raw: "Hello @foo!" },
{ raw: "Hello @foo!!" },
]
post = create_post_and_change_username(raw: "Hello @foo", revisions: revisions)
expect(post.raw).to eq("Hello @bar!!")
expect(post.cooked).to eq(%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a>!!</p>))
expect(post.revisions.count).to eq(4)
expect(post.revisions[0].modifications["raw"][0]).to eq("Hello @bar")
expect(post.revisions[0].modifications["raw"][1]).to eq("Hello Foo")
expect(post.revisions[0].modifications["cooked"][0]).to eq(
%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a></p>),
)
expect(post.revisions[0].modifications["cooked"][1]).to eq("<p>Hello Foo</p>")
expect(post.revisions[1].modifications).to include("title")
expect(post.revisions[2].modifications["raw"][0]).to eq("Hello Foo")
expect(post.revisions[2].modifications["raw"][1]).to eq("Hello @bar!")
expect(post.revisions[2].modifications["cooked"][0]).to eq("<p>Hello Foo</p>")
expect(post.revisions[2].modifications["cooked"][1]).to eq(
%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a>!</p>),
)
expect(post.revisions[3].modifications["raw"][0]).to eq("Hello @bar!")
expect(post.revisions[3].modifications["raw"][1]).to eq("Hello @bar!!")
expect(post.revisions[3].modifications["cooked"][0]).to eq(
%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a>!</p>),
)
expect(post.revisions[3].modifications["cooked"][1]).to eq(
%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a>!!</p>),
)
end
it "replaces mentions in posts marked for deletion" do
post =
create_post_and_change_username(raw: "Hello @foo") do |p|
PostDestroyer.new(p.user, p).destroy
end
expect(post.raw).to_not include("@foo")
expect(post.cooked).to_not include("foo")
expect(post.revisions.count).to eq(1)
expect(post.revisions[0].modifications["raw"][0]).to eq("Hello @bar")
expect(post.revisions[0].modifications["cooked"][0]).to eq(
%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a></p>),
)
end
it "works when users are mentioned with HTML" do
post =
create_post_and_change_username(
raw: '<a class="mention">@foo</a> and <a class="mention">@someuser</a>',
)
expect(post.raw).to eq('<a class="mention">@bar</a> and <a class="mention">@someuser</a>')
expect(post.cooked).to match_html(
'<p><a class="mention">@bar</a> and <a class="mention">@someuser</a></p>',
)
end
it "replaces mentions of oneself in posts" do
post = create_post_and_change_username(raw: "Hello @#{user.username}", user: user)
expect(post.raw).to eq("Hello @bar")
expect(post.cooked).to eq(%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a></p>))
end
it "replaces mentions of oneself in revisions" do
revisions = [
{ raw: "Hello Foo" },
{ title: "new topic title" },
{ raw: "Hello @#{user.username}!" },
{ raw: "Hello @#{user.username}!!" },
]
post =
create_post_and_change_username(raw: "Hello @#{user.username}", revisions: revisions)
expect(post.raw).to eq("Hello @bar!!")
expect(post.cooked).to eq(%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a>!!</p>))
expect(post.revisions.count).to eq(4)
expect(post.revisions[0].modifications["raw"][0]).to eq("Hello @bar")
expect(post.revisions[0].modifications["cooked"][0]).to eq(
%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a></p>),
)
expect(post.revisions[0].modifications["cooked"][1]).to eq("<p>Hello Foo</p>")
expect(post.revisions[1].modifications).to include("title")
expect(post.revisions[2].modifications["raw"][0]).to eq("Hello Foo")
expect(post.revisions[2].modifications["raw"][1]).to eq("Hello @bar!")
expect(post.revisions[2].modifications["cooked"][0]).to eq("<p>Hello Foo</p>")
expect(post.revisions[2].modifications["cooked"][1]).to eq(
%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a>!</p>),
)
expect(post.revisions[3].modifications["raw"][0]).to eq("Hello @bar!")
expect(post.revisions[3].modifications["raw"][1]).to eq("Hello @bar!!")
expect(post.revisions[3].modifications["cooked"][0]).to eq(
%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a>!</p>),
)
expect(post.revisions[3].modifications["cooked"][1]).to eq(
%Q(<p>Hello <a class="mention" href="/u/bar">@bar</a>!!</p>),
)
end
context "when using Unicode usernames" do
before { SiteSetting.unicode_usernames = true }
let(:user) { Fabricate(:user, username: "թռչուն") }
it "it correctly updates mentions" do
post = create_post_and_change_username(raw: "Hello @թռչուն", target_username: "птица")
expect(post.raw).to eq("Hello @птица")
expect(post.cooked).to eq(
%Q(<p>Hello <a class="mention" href="/u/%D0%BF%D1%82%D0%B8%D1%86%D0%B0">@птица</a></p>),
)
end
it "does not replace mentions when there are leading alphanumeric chars" do
post =
create_post_and_change_username(
raw: "Hello @թռչուն 鳥@թռչուն 2@թռչուն ٩@թռչուն",
target_username: "птица",
)
expect(post.raw).to eq("Hello @птица 鳥@թռչուն 2@թռչուն ٩@թռչուն")
expect(post.cooked).to eq(
%Q(<p>Hello <a class="mention" href="/u/%D0%BF%D1%82%D0%B8%D1%86%D0%B0">@птица</a> 鳥@թռչուն 2@թռչուն ٩@թռչուն</p>),
)
end
it "does not replace username in a mention of a similar username" do
Fabricate(:user, username: "թռչուն鳥")
Fabricate(:user, username: "թռչուն-鳥")
Fabricate(:user, username: "թռչուն_鳥")
Fabricate(:user, username: "թռչուն٩")
post =
create_post_and_change_username(
raw: "@թռչուն @թռչուն鳥 @թռչուն-鳥 @թռչուն_鳥 @թռչուն٩",
target_username: "птица",
)
expect(post.raw).to eq("@птица @թռչուն鳥 @թռչուն-鳥 @թռչուն_鳥 @թռչուն٩")
expect(post.cooked).to match_html <<~HTML
<p><a class="mention" href="/u/%D0%BF%D1%82%D0%B8%D1%86%D0%B0">@птица</a> <a class="mention" href="/u/%D5%A9%D5%BC%D5%B9%D5%B8%D6%82%D5%B6%E9%B3%A5">@թռչուն鳥</a> <a class="mention" href="/u/%D5%A9%D5%BC%D5%B9%D5%B8%D6%82%D5%B6-%E9%B3%A5">@թռչուն-鳥</a> <a class="mention" href="/u/%D5%A9%D5%BC%D5%B9%D5%B8%D6%82%D5%B6_%E9%B3%A5">@թռչուն_鳥</a> <a class="mention" href="/u/%D5%A9%D5%BC%D5%B9%D5%B8%D6%82%D5%B6%D9%A9">@թռչուն٩</a></p>
HTML
end
end
end
context "when there are quotes" do
let(:quoted_post) do
create_post(user: user, topic: topic, post_number: 1, raw: "quoted post")
end
let(:avatar_url) { user.avatar_template_url.gsub("{size}", "48") }
it "replaces the username in quote tags and updates avatar" do
post = create_post_and_change_username(raw: <<~RAW)
Lorem ipsum
[quote="foo, post:1, topic:#{quoted_post.topic.id}"]
quoted post
[/quote]
[quote='foo']
quoted post
[/quote]
[quote=foo, post:1, topic:#{quoted_post.topic.id}]
quoted post
[/quote]
dolor sit amet
RAW
expect(post.raw).to eq(<<~RAW.strip)
Lorem ipsum
[quote="bar, post:1, topic:#{quoted_post.topic.id}"]
quoted post
[/quote]
[quote='bar']
quoted post
[/quote]
[quote=bar, post:1, topic:#{quoted_post.topic.id}]
quoted post
[/quote]
dolor sit amet
RAW
expect(post.cooked).to match_html <<~HTML
<p>Lorem ipsum</p>
<aside class="quote no-group" data-username="bar" data-post="1" data-topic="#{quoted_post.topic.id}">
<div class="title">
<div class="quote-controls"></div>
<img loading="lazy" alt='' width="24" height="24" src="#{avatar_url}" class="avatar"> bar:</div>
<blockquote>
<p>quoted post</p>
</blockquote>
</aside>
<aside class="quote no-group" data-username="bar">
<div class="title">
<div class="quote-controls"></div>
<img loading="lazy" alt="" width="24" height="24" src="#{avatar_url}" class="avatar"> bar:</div>
<blockquote>
<p>quoted post</p>
</blockquote>
</aside>
<aside class="quote no-group" data-username="bar" data-post="1" data-topic="#{quoted_post.topic.id}">
<div class="title">
<div class="quote-controls"></div>
<img loading="lazy" alt="" width="24" height="24" src="#{avatar_url}" class="avatar"> bar:</div>
<blockquote>
<p>quoted post</p>
</blockquote>
</aside>
<p>dolor sit amet</p>
HTML
end
it "replaces the username in new quote format" do
post = create_post_and_change_username(raw: <<~RAW)
Lorem ipsum
[quote="Foo Bar, post:1, topic:#{quoted_post.topic.id}, username:foo"]
quoted post
[/quote]
dolor sit amet
RAW
expect(post.raw).to eq(<<~RAW.strip)
Lorem ipsum
[quote="Foo Bar, post:1, topic:#{quoted_post.topic.id}, username:bar"]
quoted post
[/quote]
dolor sit amet
RAW
expect(post.cooked).to match_html(<<~HTML)
<p>Lorem ipsum</p>
<aside class="quote no-group" data-username="bar" data-post="1" data-topic="#{quoted_post.topic.id}">
<div class="title">
<div class="quote-controls"></div>
<img loading="lazy" alt="" width="24" height="24" src="//test.localhost/letter_avatar_proxy/v4/letter/b/b77776/48.png" class="avatar"> Foo Bar:</div>
<blockquote>
<p>quoted post</p>
</blockquote>
</aside>
<p>dolor sit amet</p>
HTML
end
context "when there is a simple quote" do
let(:raw) { <<~RAW }
Lorem ipsum
[quote="foo, post:1, topic:#{quoted_post.topic.id}"]
quoted
[/quote]
RAW
let(:expected_raw) { <<~RAW.strip }
Lorem ipsum
[quote="bar, post:1, topic:#{quoted_post.topic.id}"]
quoted
[/quote]
RAW
let(:expected_cooked) { <<~HTML.rstrip }
<p>Lorem ipsum</p>
<aside class="quote no-group" data-username="bar" data-post="1" data-topic="#{quoted_post.topic.id}">
<div class="title">
<div class="quote-controls"></div>
<img loading="lazy" alt='' width="24" height="24" src="#{avatar_url}" class="avatar"> bar:</div>
<blockquote>
<p>quoted</p>
</blockquote>
</aside>
HTML
it "replaces the username in quote tags when the post is deleted" do
post =
create_post_and_change_username(raw: raw) do |p|
PostDestroyer.new(Discourse.system_user, p).destroy
end
expect(post.raw).to eq(expected_raw)
expect(post.cooked).to match_html(expected_cooked)
end
end
end
context "when there are oneboxes" do
let(:quoted_post) do
create_post(user: user, topic: topic, post_number: 1, raw: "quoted post")
end
let(:avatar_url) { user_avatar_url(user) }
let(:evil_trout) { Fabricate(:evil_trout) }
let(:another_quoted_post) do
create_post(user: evil_trout, topic: topic, post_number: 2, raw: "evil post")
end
def protocol_relative_url(url)
url.sub(/^https?:/, "")
end
def user_avatar_url(u)
u.avatar_template_url.gsub("{size}", "48")
end
it "updates avatar for linked topics and posts" do
raw = "#{quoted_post.full_url}\n#{quoted_post.topic.url}"
post = create_post_and_change_username(raw: raw)
expect(post.raw).to eq(raw)
expect(post.cooked).to match_html <<~HTML
<aside class="quote" data-post="#{quoted_post.post_number}" data-topic="#{quoted_post.topic.id}">
<div class="title">
<div class="quote-controls"></div>
<img loading="lazy" alt="" width="24" height="24" src="#{avatar_url}" class="avatar">
<a href="#{protocol_relative_url(quoted_post.full_url)}">#{quoted_post.topic.title}</a>
</div>
<blockquote>
quoted post
</blockquote>
</aside>
<aside class="quote" data-post="#{quoted_post.post_number}" data-topic="#{quoted_post.topic.id}">
<div class="title">
<div class="quote-controls"></div>
<img loading="lazy" alt="" width="24" height="24" src="#{avatar_url}" class="avatar">
<a href="#{protocol_relative_url(quoted_post.topic.url)}">#{quoted_post.topic.title}</a>
</div>
<blockquote>
quoted post
</blockquote>
</aside>
HTML
end
it "does not update the wrong avatar" do
raw = "#{quoted_post.full_url}\n#{another_quoted_post.full_url}"
post = create_post_and_change_username(raw: raw)
expect(post.raw).to eq(raw)
expect(post.cooked).to match_html <<~HTML
<aside class="quote" data-post="#{quoted_post.post_number}" data-topic="#{quoted_post.topic.id}">
<div class="title">
<div class="quote-controls"></div>
<img loading="lazy" alt="" width="24" height="24" src="#{avatar_url}" class="avatar">
<a href="#{protocol_relative_url(quoted_post.full_url)}">#{quoted_post.topic.title}</a>
</div>
<blockquote>
quoted post
</blockquote>
</aside>
<aside class="quote" data-post="#{another_quoted_post.post_number}" data-topic="#{another_quoted_post.topic.id}">
<div class="title">
<div class="quote-controls"></div>
<img loading="lazy" alt="" width="24" height="24" src="#{user_avatar_url(evil_trout)}" class="avatar">
<a href="#{protocol_relative_url(another_quoted_post.full_url)}">#{another_quoted_post.topic.title}</a>
</div>
<blockquote>
evil post
</blockquote>
</aside>
HTML
end
end
# Small-action posts store the affected username in the
# "action_code_who" custom field; only exact matches are rewritten.
it "updates username in small action posts" do
  invited_by = Fabricate(:user)
  p1 = topic.add_small_action(invited_by, "invited_user", "foo")
  p2 = topic.add_small_action(invited_by, "invited_user", "foobar")
  UsernameChanger.change(user, "bar")
  expect(p1.reload.custom_fields["action_code_who"]).to eq("bar")
  # A similar-but-different username must be left untouched.
  expect(p2.reload.custom_fields["action_code_who"]).to eq("foobar")
end
end
context "when there are notifications" do
# Fabricates a notification of +type+ for +notified_user+, serializing
# +data+ as the JSON payload and pointing at +post+'s topic/post_number
# (both nil when +post+ is nil).
def create_notification(type, notified_user, post, data = {})
  Fabricate(
    :notification,
    notification_type: Notification.types[type],
    user: notified_user,
    data: data.to_json,
    topic: post&.topic,
    post_number: post&.post_number,
  )
end

# Reloads the notification and parses its JSON data with symbol keys.
def notification_data(notification)
  JSON.parse(notification.reload.data, symbolize_names: true)
end

# The helpers below build the payload shapes used by the different
# notification types exercised in the examples.
def original_and_display_username(username)
  { original_username: username, display_username: username, foo: "bar" }
end

def original_username_and_some_text_as_display_username(username)
  { original_username: username, display_username: "some text", foo: "bar" }
end

def only_display_username(username)
  { display_username: username }
end

def username_and_something_else(username)
  { username: username, foo: "bar" }
end
it "replaces usernames in notifications" do
renamed_user = Fabricate(:user, username: "alice")
another_user = Fabricate(:user, username: "another_user")
notified_user = Fabricate(:user)
p1 = Fabricate(:post, post_number: 1, user: renamed_user)
p2 = Fabricate(:post, post_number: 1, user: another_user)
Fabricate(
:invited_user,
invite: Fabricate(:invite, invited_by: notified_user),
user: renamed_user,
)
Fabricate(
:invited_user,
invite: Fabricate(:invite, invited_by: notified_user),
user: another_user,
)
n01 =
create_notification(:mentioned, notified_user, p1, original_and_display_username("alice"))
n02 =
create_notification(
:mentioned,
notified_user,
p2,
original_and_display_username("another_user"),
)
n03 =
create_notification(
:mentioned,
notified_user,
p1,
original_username_and_some_text_as_display_username("alice"),
)
n04 = create_notification(:mentioned, notified_user, p1, only_display_username("alice"))
n05 =
create_notification(:invitee_accepted, notified_user, nil, only_display_username("alice"))
n06 =
create_notification(
:invitee_accepted,
notified_user,
nil,
only_display_username("another_user"),
)
n07 =
create_notification(
:granted_badge,
renamed_user,
nil,
username_and_something_else("alice"),
)
n08 =
create_notification(
:granted_badge,
another_user,
nil,
username_and_something_else("another_user"),
)
n09 =
create_notification(
:group_message_summary,
renamed_user,
nil,
username_and_something_else("alice"),
)
n10 =
create_notification(
:group_message_summary,
another_user,
nil,
username_and_something_else("another_user"),
)
UsernameChanger.change(renamed_user, "bob")
expect(notification_data(n01)).to eq(original_and_display_username("bob"))
expect(notification_data(n02)).to eq(original_and_display_username("another_user"))
expect(notification_data(n03)).to eq(
original_username_and_some_text_as_display_username("bob"),
)
expect(notification_data(n04)).to eq(only_display_username("bob"))
expect(notification_data(n05)).to eq(only_display_username("bob"))
expect(notification_data(n06)).to eq(only_display_username("another_user"))
expect(notification_data(n07)).to eq(username_and_something_else("bob"))
expect(notification_data(n08)).to eq(username_and_something_else("another_user"))
expect(notification_data(n09)).to eq(username_and_something_else("bob"))
expect(notification_data(n10)).to eq(username_and_something_else("another_user"))
end
end
end
# UsernameChanger.override applies a forced rename and only falls back
# to a suggested name when the requested one is already taken.
describe "#override" do
  # Table rows: [example name, current username, new username, expected
  # resulting username] (a trailing flag element exists but is unused).
  common_test_cases = [
    ["overrides the username if a new name is different", "john", "bill", "bill", false],
    ["does not change the username if a new name is the same", "john", "john", "john", false],
    ["overrides the username if a new name has different case", "john", "JoHN", "JoHN", false],
  ]

  context "when unicode_usernames is off" do
    before { SiteSetting.unicode_usernames = false }

    [
      *common_test_cases,
      [
        "does not change the username if a new name after unicode normalization is the same",
        "john",
        "john¥¥",
        "john",
      ],
    ].each do |testcase_name, current, new, overrode|
      it "#{testcase_name}" do
        user = Fabricate(:user, username: current)
        UsernameChanger.override(user, new)
        expect(user.username).to eq(overrode)
      end
    end

    it "overrides the username with username suggestions in case the username is already taken" do
      user = Fabricate(:user, username: "bill")
      Fabricate(:user, username: "john")
      UsernameChanger.override(user, "john")
      # "john" is taken, so a suggestion with a numeric suffix is used.
      expect(user.username).to eq("john1")
    end
  end

  context "when unicode_usernames is on" do
    before { SiteSetting.unicode_usernames = true }

    [
      *common_test_cases,
      [
        "overrides the username if a new name after unicode normalization is different only in case",
        "lo\u0308we",
        "L\u00F6wee",
        "L\u00F6wee",
      ],
    ].each do |testcase_name, current, new, overrode|
      it "#{testcase_name}" do
        user = Fabricate(:user, username: current)
        UsernameChanger.override(user, new)
        expect(user.username).to eq(overrode)
      end
    end
  end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Writes GroupHistory audit rows for membership and settings changes
# performed on a group by a given acting user.
class GroupActionLogger
  def initialize(acting_user, group)
    @acting_user = acting_user
    @group = group
  end

  # Records that +target_user+ was promoted to group owner.
  def log_make_user_group_owner(target_user)
    record!(action: GroupHistory.actions[:make_user_group_owner], target_user: target_user)
  end

  # Records that +target_user+ was demoted from group owner.
  def log_remove_user_as_group_owner(target_user)
    record!(action: GroupHistory.actions[:remove_user_as_group_owner], target_user: target_user)
  end

  # Records that +target_user+ joined the group; +subject+ optionally
  # carries extra context about how they were added.
  def log_add_user_to_group(target_user, subject = nil)
    record!(
      action: GroupHistory.actions[:add_user_to_group],
      target_user: target_user,
      subject: subject,
    )
  end

  # Records that +target_user+ left the group; +subject+ optionally
  # carries extra context about how they were removed.
  def log_remove_user_from_group(target_user, subject = nil)
    record!(
      action: GroupHistory.actions[:remove_user_from_group],
      target_user: target_user,
      subject: subject,
    )
  end

  # Writes one history row per changed (non-excluded) group attribute,
  # capturing the before/after values. Changes where both sides are
  # blank are skipped as noise.
  def log_change_group_settings
    changes = @group.previous_changes.except(*excluded_attributes)
    changes.each do |attribute_name, (old_value, new_value)|
      next if old_value.blank? && new_value.blank?

      record!(
        action: GroupHistory.actions[:change_group_setting],
        subject: attribute_name,
        prev_value: old_value,
        new_value: new_value,
      )
    end
  end

  private

  # Attributes whose changes are bookkeeping noise and are not audited.
  def excluded_attributes
    %i[bio_cooked updated_at created_at user_count]
  end

  # Creates a GroupHistory row merged with the group/acting-user context.
  def record!(attributes)
    GroupHistory.create!({ group: @group, acting_user: @acting_user }.merge(attributes))
  end
end
``` | # frozen_string_literal: true
# Verifies GroupActionLogger writes the expected GroupHistory row
# (action, acting user, target user) for each log_* helper.
RSpec.describe GroupActionLogger do
  subject(:logger) { described_class.new(group_owner, group) }

  fab!(:group_owner) { Fabricate(:user) }
  fab!(:group)
  fab!(:user)

  # Owner-level log entries are made on behalf of an actual group owner.
  before { group.add_owner(group_owner) }

  describe "#log_make_user_group_owner" do
    it "should create the right record" do
      logger.log_make_user_group_owner(user)
      group_history = GroupHistory.last
      expect(group_history.action).to eq(GroupHistory.actions[:make_user_group_owner])
      expect(group_history.acting_user).to eq(group_owner)
      expect(group_history.target_user).to eq(user)
    end
  end

  describe "#log_remove_user_as_group_owner" do
    it "should create the right record" do
      logger.log_remove_user_as_group_owner(user)
      group_history = GroupHistory.last
      expect(group_history.action).to eq(GroupHistory.actions[:remove_user_as_group_owner])
      expect(group_history.acting_user).to eq(group_owner)
      expect(group_history.target_user).to eq(user)
    end
  end

  describe "#log_add_user_to_group" do
    context "as a group owner" do
      it "should create the right record" do
        logger.log_add_user_to_group(user)
        group_history = GroupHistory.last
        expect(group_history.action).to eq(GroupHistory.actions[:add_user_to_group])
        expect(group_history.acting_user).to eq(group_owner)
        expect(group_history.target_user).to eq(user)
      end
    end

    context "as a normal user" do
      subject(:logger) { described_class.new(user, group) }

      # Self-serve joins require the group to allow public admission.
      before { group.update!(public_admission: true) }

      it "should create the right record" do
        logger.log_add_user_to_group(user)
        group_history = GroupHistory.last
        expect(group_history.action).to eq(GroupHistory.actions[:add_user_to_group])
        expect(group_history.acting_user).to eq(user)
        expect(group_history.target_user).to eq(user)
      end
    end
  end

  describe "#log_remove_user_from_group" do
    context "as group owner" do
      it "should create the right record" do
        logger.log_remove_user_from_group(user)
        group_history = GroupHistory.last
        expect(group_history.action).to eq(GroupHistory.actions[:remove_user_from_group])
        expect(group_history.acting_user).to eq(group_owner)
        expect(group_history.target_user).to eq(user)
      end
    end

    context "as a normal user" do
      subject(:logger) { described_class.new(user, group) }

      # Self-serve leaves require the group to allow public exit.
      before { group.update!(public_exit: true) }

      it "should create the right record" do
        logger.log_remove_user_from_group(user)
        group_history = GroupHistory.last
        expect(group_history.action).to eq(GroupHistory.actions[:remove_user_from_group])
        expect(group_history.acting_user).to eq(user)
        expect(group_history.target_user).to eq(user)
      end
    end
  end

  describe "#log_change_group_settings" do
    it "should create the right record" do
      # created_at is in the excluded list, so only public_admission
      # should produce a history row.
      group.update!(public_admission: true, created_at: Time.zone.now)
      expect { logger.log_change_group_settings }.to change { GroupHistory.count }.by(1)
      group_history = GroupHistory.last
      expect(group_history.action).to eq(GroupHistory.actions[:change_group_setting])
      expect(group_history.acting_user).to eq(group_owner)
      expect(group_history.subject).to eq("public_admission")
      # Boolean before/after values are stored as "f"/"t" strings.
      expect(group_history.prev_value).to eq("f")
      expect(group_history.new_value).to eq("t")
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Rewrites raw "@previous_name" group mentions to "@current_name" in all
# posts whose cooked HTML contains a group mention, after a group rename.
class GroupMentionsUpdater
  # @param current_name  [String] the group's new name
  # @param previous_name [String] the group's old name
  def self.update(current_name, previous_name)
    # Escape the old name so regex metacharacters in a group name cannot
    # corrupt the match (or raise RegexpError). The pattern is loop-
    # invariant, so build it once instead of per post.
    mention_pattern = /(^|\s)(@#{Regexp.escape(previous_name)})(\s|$)/
    Post
      .where(
        "cooked LIKE '%class=\"mention-group%' AND raw LIKE :previous_name",
        previous_name: "%@#{previous_name}%",
      )
      .find_in_batches do |posts|
        posts.each do |post|
          # Only rewrite whole-word mentions bounded by whitespace or
          # line edges, then persist without re-running validations.
          post.raw.gsub!(mention_pattern, "\\1@#{current_name}\\3")
          post.save!(validate: false)
        end
      end
  end
end
``` | # frozen_string_literal: true
# Verifies that renaming a mentionable group rewrites "@old_name"
# mentions in post raw, leaves non-mentions alone, and bypasses post
# validations while doing so.
RSpec.describe GroupMentionsUpdater do
  fab!(:post)

  before { Jobs.run_immediately! }

  describe ".update" do
    it "should update valid group mentions" do
      new_group_name = "awesome_team"
      old_group_name = "team"
      # Each pair is [raw before rename, expected raw after rename].
      [
        ["@#{old_group_name} is awesome!", "@#{new_group_name} is awesome!"],
        ["This @#{old_group_name} is awesome!", "This @#{new_group_name} is awesome!"],
        ["Mention us @ @#{old_group_name}", "Mention us @ @#{new_group_name}"],
      ].each do |raw, expected_raw|
        group =
          Fabricate(:group, name: old_group_name, mentionable_level: Group::ALIAS_LEVELS[:everyone])
        post.update!(raw: raw)
        group.update!(name: new_group_name)
        post.reload
        expect(post.raw_mentions).to eq([new_group_name])
        expect(post.raw).to eq(expected_raw)
        group.destroy!
      end
    end

    it "should not update invalid group mentions" do
      group = Fabricate(:group, name: "team", mentionable_level: Group::ALIAS_LEVELS[:everyone])
      # An email address containing the group name is not a mention and
      # must survive the rename untouched.
      post.update!(raw: "This is not [email protected]")
      expect(post.reload.raw_mentions).to eq([])
      group.update!(name: "new_team_name")
      expect(post.reload.raw_mentions).to eq([])
    end

    it "should ignore validations" do
      everyone_mention_level = Group::ALIAS_LEVELS[:everyone]
      %w[awesome_team pro_team].each do |name|
        Fabricate(:group, name: name, mentionable_level: everyone_mention_level)
      end
      post.update!(raw: "@awesome_team is cool and so is @pro_team")
      # Exceeding max_mentions_per_post must not block the rewrite,
      # since posts are saved with validate: false.
      SiteSetting.max_mentions_per_post = 1
      GroupMentionsUpdater.update("cool_team", "awesome_team")
      post.reload
      expect(post.raw_mentions).to match_array(%w[cool_team pro_team])
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Creates and cleans up "liked" / "edited" notifications in response to
# post actions and post revisions. All side effects can be globally
# suppressed via .disable/.enable (used by imports and tests).
class PostActionNotifier
  def self.disable
    @disabled = true
  end

  def self.enable
    @disabled = false
  end

  # For testing purposes
  def self.reset!
    @custom_post_revision_notifier_recipients = nil
  end

  # Memoized PostAlerter instance used to deliver notifications.
  def self.alerter
    @alerter ||= PostAlerter.new
  end

  # Rebuilds the aggregate "liked" notification for +post+'s author from
  # the likes received in the last day. +read+ carries over the read
  # state of the notification(s) this one replaces. No notification is
  # created when no recent likes remain.
  def self.refresh_like_notification(post, read)
    return unless post && post.user_id && post.topic
    # Most recent likers first; only likes from the last 24 hours count.
    usernames =
      post
        .post_actions
        .where(post_action_type_id: PostActionType.types[:like])
        .joins(:user)
        .order("post_actions.created_at desc")
        .where("post_actions.created_at > ?", 1.day.ago)
        .pluck(:username)
    if usernames.length > 0
      data = {
        topic_title: post.topic.title,
        username: usernames[0],
        display_username: usernames[0],
        username2: usernames[1],
        count: usernames.length,
      }
      Notification.create(
        notification_type: Notification.types[:liked],
        topic_id: post.topic_id,
        post_number: post.post_number,
        user_id: post.user_id,
        read: read,
        data: data.to_json,
      )
    end
  end

  # Cleans up notifications when a post action is soft-deleted. For
  # likes, the aggregate "liked" notification is rebuilt from the
  # remaining likes; for other action types the linked notifications
  # are simply destroyed.
  def self.post_action_deleted(post_action)
    return if @disabled
    # We only care about deleting post actions for now
    return if post_action.deleted_at.blank?
    if post_action.post_action_type_id == PostActionType.types[:like] && post_action.post
      read = true
      Notification
        .where(
          topic_id: post_action.post.topic_id,
          user_id: post_action.post.user_id,
          post_number: post_action.post.post_number,
          notification_type: Notification.types[:liked],
        )
        .each do |notification|
          # Keep the rebuilt notification unread if any removed one was.
          read = false unless notification.read
          notification.destroy
        end
      refresh_like_notification(post_action.post, read)
    else
      # not using destroy_all cause we want stuff to trigger
      Notification.where(post_action_id: post_action.id).each(&:destroy)
    end
  end

  # Notifies a post's author when their post receives a like.
  def self.post_action_created(post_action)
    return if @disabled
    # We only notify on likes for now
    return unless post_action.is_like?
    post = post_action.post
    return if post_action.user.blank? || post.blank?
    alerter.create_notification(
      post.user,
      Notification.types[:liked],
      post,
      display_username: post_action.user.username,
      post_action_id: post_action.id,
      user_id: post_action.user_id,
    )
  end

  # Queues "post edited" notifications after a revision is created.
  # Skips private messages, revisions with no author/topic, and anything
  # suppressed by notification_is_disabled?. Self-edits notify nobody
  # unless the wiki/OP-watcher rule below applies.
  def self.after_create_post_revision(post_revision)
    return if @disabled
    post = post_revision.post
    return unless post
    return if post_revision.user.blank?
    return if post.topic.blank?
    return if post.topic.private_message?
    return if notification_is_disabled?(post_revision)
    user_ids = []
    user_ids << post.user_id if post_revision.user_id != post.user_id
    # Notify all users watching the topic when the OP of a wiki topic is edited
    # or if the topic category allows unlimited owner edits on the OP.
    if post.is_first_post? &&
         (post.wiki? || post.topic.category_allows_unlimited_owner_edits_on_first_post?)
      user_ids.concat(
        TopicUser
          .watching(post.topic_id)
          .where.not(user_id: post_revision.user_id)
          .where(topic: post.topic)
          .pluck(:user_id),
      )
    end
    # Plugins may add recipients via add_post_revision_notifier_recipients.
    custom_post_revision_notifier_recipients.each do |block|
      user_ids.concat(Array(block.call(post_revision)))
    end
    if user_ids.present?
      # Enqueue after commit so the job can see the persisted revision.
      DB.after_commit do
        Jobs.enqueue(:notify_post_revision, user_ids: user_ids, post_revision_id: post_revision.id)
      end
    end
  end

  # Notifies the users who flagged a post that it was edited (and thus
  # unhidden) by its last editor.
  def self.after_post_unhide(post, flaggers)
    return if @disabled || post.last_editor.blank? || flaggers.blank?
    flaggers.each do |flagger|
      alerter.create_notification(
        flagger,
        Notification.types[:edited],
        post,
        display_username: post.last_editor.username,
        acting_user_id: post.last_editor.id,
      )
    end
  end

  # Set of plugin-registered blocks returning extra revision recipients.
  def self.custom_post_revision_notifier_recipients
    @custom_post_revision_notifier_recipients ||= Set.new
  end

  def self.add_post_revision_notifier_recipients(&block)
    custom_post_revision_notifier_recipients << block
  end

  private

  # True when site settings suppress this revision's notifications:
  # system-user edits, category-only edits, or tag-only edits.
  # NOTE(review): `private` has no effect on `def self.` methods, so this
  # remains technically callable from outside; the intent is clearly
  # private-only (consider private_class_method) — TODO confirm.
  def self.notification_is_disabled?(post_revision)
    modifications = post_revision.modifications
    (
      SiteSetting.disable_system_edit_notifications &&
        post_revision.user_id == Discourse::SYSTEM_USER_ID
    ) ||
      (
        SiteSetting.disable_category_edit_notifications &&
          modifications&.dig("category_id").present?
      ) || (SiteSetting.disable_tags_edit_notifications && modifications&.dig("tags").present?)
  end
end
``` | # frozen_string_literal: true
# Behavioural specs for PostActionNotifier: which notifications are created
# for post revisions, likes, private messages and moderator-action posts.
RSpec.describe PostActionNotifier do
  before do
    # The notifier is disabled in the test environment by default; jobs run
    # inline so notification side effects are observable immediately.
    PostActionNotifier.enable
    Jobs.run_immediately!
  end

  fab!(:evil_trout)
  fab!(:post)

  context "when editing a post" do
    it "notifies a user of the revision" do
      expect { post.revise(evil_trout, raw: "world is the new body of the message") }.to change {
        post.reload.user.notifications.count
      }.by(1)
    end

    it "notifies watching users of revision when post is wiki-ed and first post in topic" do
      SiteSetting.editing_grace_period_max_diff = 1
      post.update!(wiki: true)
      owner = post.user
      user2 = Fabricate(:user)
      user3 = Fabricate(:user)
      # user2 watches the topic (gets edit notifications); user3 only tracks
      # it and must NOT be notified.
      TopicUser.change(
        user2.id,
        post.topic,
        notification_level: TopicUser.notification_levels[:watching],
      )
      TopicUser.change(
        user3.id,
        post.topic,
        notification_level: TopicUser.notification_levels[:tracking],
      )
      expect do
        post.revise(Fabricate(:user), raw: "I made some changes to the wiki!")
      end.to change { Notification.count }.by(2)
      edited_notification_type = Notification.types[:edited]
      expect(Notification.exists?(user: owner, notification_type: edited_notification_type)).to eq(
        true,
      )
      expect(Notification.exists?(user: user2, notification_type: edited_notification_type)).to eq(
        true,
      )
      # The editor themselves is never notified of their own edit.
      expect do post.revise(owner, raw: "I made some changes to the wiki again!") end.to change {
        Notification.where(notification_type: edited_notification_type).count
      }.by(1)
      expect(
        Notification.where(user: user2, notification_type: edited_notification_type).count,
      ).to eq(2)
      expect do post.revise(user2, raw: "I changed the wiki totally") end.to change {
        Notification.where(notification_type: edited_notification_type).count
      }.by(1)
      expect(
        Notification.where(user: owner, notification_type: edited_notification_type).count,
      ).to eq(2)
    end

    it "notifies watching users of revision when topic category allow_unlimited_owner_edits_on_first_post and first post in topic is edited" do
      SiteSetting.editing_grace_period_max_diff = 1
      post.topic.update(
        category: Fabricate(:category, allow_unlimited_owner_edits_on_first_post: true),
      )
      owner = post.user
      user2 = Fabricate(:user)
      user3 = Fabricate(:user)
      TopicUser.change(
        user2.id,
        post.topic,
        notification_level: TopicUser.notification_levels[:watching],
      )
      TopicUser.change(
        user3.id,
        post.topic,
        notification_level: TopicUser.notification_levels[:tracking],
      )
      expect do
        post.revise(Fabricate(:user), raw: "I made some changes to the first post!")
      end.to change { Notification.count }.by(2)
      edited_notification_type = Notification.types[:edited]
      expect(Notification.exists?(user: owner, notification_type: edited_notification_type)).to eq(
        true,
      )
      expect(Notification.exists?(user: user2, notification_type: edited_notification_type)).to eq(
        true,
      )
      expect do
        post.revise(owner, raw: "I made some changes to the first post again!")
      end.to change { Notification.where(notification_type: edited_notification_type).count }.by(1)
      expect(
        Notification.where(user: user2, notification_type: edited_notification_type).count,
      ).to eq(2)
      expect do post.revise(user2, raw: "I changed the first post totally") end.to change {
        Notification.where(notification_type: edited_notification_type).count
      }.by(1)
      expect(
        Notification.where(user: owner, notification_type: edited_notification_type).count,
      ).to eq(2)
    end

    it "stores the revision number with the notification" do
      post.revise(evil_trout, raw: "world is the new body of the message")
      notification_data = JSON.parse post.user.notifications.last.data
      expect(notification_data["revision_number"]).to eq post.post_revisions.last.number
    end

    context "when edit notifications are disabled" do
      before { SiteSetting.disable_system_edit_notifications = true }

      it "notifies a user of the revision made by another user" do
        expect { post.revise(evil_trout, raw: "world is the new body of the message") }.to change(
          post.user.notifications,
          :count,
        ).by(1)
      end

      it "does not notify a user of the revision made by the system user" do
        expect {
          post.revise(Discourse.system_user, raw: "world is the new body of the message")
        }.not_to change(post.user.notifications, :count)
      end
    end

    context "when category edit notifications are disabled" do
      it "notifies a user of the revision made by another user" do
        SiteSetting.disable_category_edit_notifications = false
        expect { post.revise(evil_trout, category_id: Fabricate(:category).id) }.to change(
          post.user.notifications,
          :count,
        ).by(1)
      end

      it "does not notify a user of the revision made by the system user" do
        SiteSetting.disable_category_edit_notifications = true
        expect { post.revise(evil_trout, category_id: Fabricate(:category).id) }.not_to change(
          post.user.notifications,
          :count,
        )
      end
    end

    context "when tags edit notifications are disabled" do
      it "notifies a user of the revision made by another user" do
        SiteSetting.disable_tags_edit_notifications = false
        expect { post.revise(evil_trout, tags: [Fabricate(:tag).name]) }.to change(
          post.user.notifications,
          :count,
        ).by(1)
      end

      it "does not notify a user of the revision made by the system user" do
        SiteSetting.disable_tags_edit_notifications = true
        expect { post.revise(evil_trout, tags: [Fabricate(:tag).name]) }.not_to change(
          post.user.notifications,
          :count,
        )
      end
    end

    context "when using plugin API to add custom recipients" do
      let(:lurker) { Fabricate(:user) }

      before do
        plugin = Plugin::Instance.new
        plugin.add_post_revision_notifier_recipients { |post_revision| [lurker.id] }
      end

      # Clear the plugin-registered recipient blocks so they don't leak
      # into other examples.
      after { PostActionNotifier.reset! }

      it "notifies the specified user of the revision" do
        expect { post.revise(evil_trout, raw: "world is the new body of the message") }.to change {
          lurker.notifications.count
        }.by(1)
      end
    end
  end

  context "with private message" do
    fab!(:private_message) do
      Fabricate(:topic, archetype: Archetype.private_message, category_id: nil)
    end
    fab!(:user)
    fab!(:mention_post) do
      Fabricate(:post, topic: private_message, user: user, raw: "Hello @eviltrout")
    end

    it "won't notify someone who can't see the post" do
      expect { PostAlerter.post_created(mention_post) }.not_to change(
        evil_trout.notifications,
        :count,
      )
    end

    it "creates like notifications" do
      other_user = Fabricate(:user)
      private_message.allowed_users << user << other_user
      expect { PostActionCreator.like(other_user, mention_post) }.to change(
        user.notifications,
        :count,
      )
    end
  end

  context "with moderator action post" do
    fab!(:user)
    fab!(:first_post) { Fabricate(:post, user: user, raw: "A useless post for you.") }
    let(:topic) { first_post.topic }

    it "should not notify anyone" do
      expect {
        Fabricate(
          :post,
          topic: topic,
          raw: "This topic is CLOSED",
          post_type: Post.types[:moderator_action],
        )
      }.to_not change { Notification.count }
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Applies name / user_selectable / base-scheme / color changes to a
# ColorScheme inside a single transaction and returns the scheme (which
# may be unsaved if validation failed — callers check `valid?`).
class ColorSchemeRevisor
  def initialize(color_scheme, params = {})
    @color_scheme = color_scheme
    @params = params
  end

  # Convenience entry point: ColorSchemeRevisor.revise(scheme, params).
  def self.revise(color_scheme, params)
    self.new(color_scheme, params).revise
  end

  def revise
    ColorScheme.transaction do
      # Only touch attributes explicitly present in params; a nil value is
      # meaningful (e.g. colors: nil means "no color changes").
      @color_scheme.name = @params[:name] if @params.has_key?(:name)
      @color_scheme.user_selectable = @params[:user_selectable] if @params.has_key?(
        :user_selectable,
      )
      @color_scheme.base_scheme_id = @params[:base_scheme_id] if @params.has_key?(:base_scheme_id)
      has_colors = @params[:colors]
      if has_colors
        @params[:colors].each do |c|
          # Update an existing color by name, or append a brand-new one.
          if existing = @color_scheme.colors_by_name[c[:name]]
            existing.update(c)
          else
            @color_scheme.color_scheme_colors << ColorSchemeColor.new(name: c[:name], hex: c[:hex])
          end
        end
        @color_scheme.clear_colors_cache
      end
      # NOTE(review): mixes saved_change_to_* (post-save dirty state) with
      # will_save_change_to_* (pre-save dirty state); presumably relies on
      # ActiveModel::Dirty behaviour of the assignments above — confirm
      # before refactoring this condition.
      if has_colors || @color_scheme.saved_change_to_name? ||
           @color_scheme.will_save_change_to_user_selectable? ||
           @color_scheme.saved_change_to_base_scheme_id?
        @color_scheme.save
      end
    end
    @color_scheme
  end
end
``` | # frozen_string_literal: true
# Specs for ColorSchemeRevisor: renaming, base-scheme changes, color
# updates, rollback on invalid colors, and the user_selectable flag.
RSpec.describe ColorSchemeRevisor do
  let(:color) { Fabricate.build(:color_scheme_color, hex: "FFFFFF", color_scheme: nil) }
  # Timestamps are backdated so "does nothing" can assert updated_at is
  # untouched by a no-op revise.
  let(:color_scheme) do
    Fabricate(
      :color_scheme,
      created_at: 1.day.ago,
      updated_at: 1.day.ago,
      color_scheme_colors: [color],
    )
  end
  let(:valid_params) { { name: color_scheme.name, colors: nil } }

  describe "revise" do
    it "does nothing if there are no changes" do
      expect {
        ColorSchemeRevisor.revise(color_scheme, valid_params.merge(colors: nil))
      }.to_not change { color_scheme.reload.updated_at }
    end

    it "can change the name" do
      ColorSchemeRevisor.revise(color_scheme, valid_params.merge(name: "Changed Name"))
      expect(color_scheme.reload.name).to eq("Changed Name")
    end

    it "can update the base_scheme_id" do
      ColorSchemeRevisor.revise(color_scheme, valid_params.merge(base_scheme_id: "test"))
      expect(color_scheme.reload.base_scheme_id).to eq("test")
    end

    it "can change colors" do
      ColorSchemeRevisor.revise(
        color_scheme,
        valid_params.merge(
          colors: [{ name: color.name, hex: "BEEF99" }, { name: "bob", hex: "AAAAAA" }],
        ),
      )
      color_scheme.reload
      expect(color_scheme.version).to eq(2)
      expect(color_scheme.colors.size).to eq(2)
      expect(color_scheme.colors.find_by(name: color.name).hex).to eq("BEEF99")
      expect(color_scheme.colors.find_by(name: "bob").hex).to eq("AAAAAA")
    end

    it "doesn't make changes when a color is invalid" do
      # The transaction must roll back: version unchanged, hex untouched.
      expect {
        cs =
          ColorSchemeRevisor.revise(
            color_scheme,
            valid_params.merge(colors: [{ name: color.name, hex: "OOPS" }]),
          )
        expect(cs).not_to be_valid
        expect(cs.errors).to be_present
      }.to_not change { color_scheme.reload.version }
      expect(color_scheme.colors.first.hex).to eq(color.hex)
    end

    it "can change the user_selectable column" do
      expect(color_scheme.user_selectable).to eq(false)
      ColorSchemeRevisor.revise(color_scheme, { user_selectable: true })
      expect(color_scheme.reload.user_selectable).to eq(true)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Applies profile, preference and notification-setting changes to a user
# on behalf of an acting user, with Guardian permission checks throughout.
# The main entry point is #update(attributes); it returns a truthy value
# on success and false/nil on failure.
class UserUpdater
  # Maps incoming category-id params to CategoryUser notification levels.
  CATEGORY_IDS = {
    watched_first_post_category_ids: :watching_first_post,
    watched_category_ids: :watching,
    tracked_category_ids: :tracking,
    regular_category_ids: :regular,
    muted_category_ids: :muted,
  }

  # Maps incoming tag-name params to TagUser notification levels.
  TAG_NAMES = {
    watching_first_post_tags: :watching_first_post,
    watched_tags: :watching,
    tracked_tags: :tracking,
    muted_tags: :muted,
  }

  # UserOption columns that may be assigned directly from the attributes
  # hash (booleans are coerced from "true"/"false" strings in #update).
  OPTION_ATTR = %i[
    mailing_list_mode
    mailing_list_mode_frequency
    email_digests
    email_level
    email_messages_level
    external_links_in_new_tab
    enable_quoting
    enable_defer
    color_scheme_id
    dark_scheme_id
    dynamic_favicon
    automatically_unpin_topics
    digest_after_minutes
    new_topic_duration_minutes
    auto_track_topics_after_msecs
    notification_level_when_replying
    email_previous_replies
    email_in_reply_to
    like_notification_frequency
    include_tl0_in_digests
    theme_ids
    allow_private_messages
    enable_allowed_pm_users
    homepage_id
    hide_profile_and_presence
    text_size
    title_count_mode
    timezone
    skip_new_user_tips
    seen_popups
    default_calendar
    bookmark_auto_delete_preference
    sidebar_link_to_filtered_list
    sidebar_show_count_of_new_items
    watched_precedence_over_muted
  ]

  # Permitted nested attributes for the user's notification schedule:
  # :enabled plus a start/end time for each of the 7 days.
  NOTIFICATION_SCHEDULE_ATTRS = -> do
    attrs = [:enabled]
    7.times do |n|
      attrs.push("day_#{n}_start_time".to_sym)
      attrs.push("day_#{n}_end_time".to_sym)
    end
    { user_notification_schedule: attrs }
  end.call

  # actor: the user performing the change; user: the user being changed.
  def initialize(actor, user)
    @user = user
    @user_guardian = Guardian.new(user)
    @guardian = Guardian.new(actor)
    @actor = actor
  end

  def update(attributes = {})
    user_profile = user.user_profile
    user_profile.dismissed_banner_key = attributes[:dismissed_banner_key] if attributes[
      :dismissed_banner_key
    ].present?
    # Profile fields are frozen when DiscourseConnect overrides them.
    unless SiteSetting.enable_discourse_connect && SiteSetting.discourse_connect_overrides_bio
      user_profile.bio_raw = attributes.fetch(:bio_raw) { user_profile.bio_raw }
    end
    unless SiteSetting.enable_discourse_connect && SiteSetting.discourse_connect_overrides_location
      user_profile.location = attributes.fetch(:location) { user_profile.location }
    end
    unless SiteSetting.enable_discourse_connect && SiteSetting.discourse_connect_overrides_website
      user_profile.website = format_url(attributes.fetch(:website) { user_profile.website })
    end
    # Empty string (or missing permission) clears the background upload.
    if attributes[:profile_background_upload_url] == "" ||
         !guardian.can_upload_profile_header?(user)
      user_profile.profile_background_upload_id = nil
    elsif upload = Upload.get_from_url(attributes[:profile_background_upload_url])
      user_profile.profile_background_upload_id = upload.id
    end
    if attributes[:card_background_upload_url] == "" ||
         !guardian.can_upload_user_card_background?(user)
      user_profile.card_background_upload_id = nil
    elsif upload = Upload.get_from_url(attributes[:card_background_upload_url])
      user_profile.card_background_upload_id = upload.id
    end
    if attributes[:user_notification_schedule]
      user_notification_schedule =
        user.user_notification_schedule || UserNotificationSchedule.new(user: user)
      user_notification_schedule.assign_attributes(attributes[:user_notification_schedule])
    end
    old_user_name = user.name.present? ? user.name : ""
    user.name = attributes.fetch(:name) { user.name } if guardian.can_edit_name?(user)
    user.locale = attributes.fetch(:locale) { user.locale }
    user.date_of_birth = attributes.fetch(:date_of_birth) { user.date_of_birth }
    if attributes[:title] && attributes[:title] != user.title &&
         guardian.can_grant_title?(user, attributes[:title])
      user.title = attributes[:title]
    end
    # Primary group: settable only when the site setting allows it and the
    # user may use that group; a blank value clears it.
    if SiteSetting.user_selected_primary_groups && attributes[:primary_group_id] &&
         attributes[:primary_group_id] != user.primary_group_id &&
         guardian.can_use_primary_group?(user, attributes[:primary_group_id])
      user.primary_group_id = attributes[:primary_group_id]
    elsif SiteSetting.user_selected_primary_groups && attributes[:primary_group_id] &&
          attributes[:primary_group_id].blank?
      user.primary_group_id = nil
    end
    if attributes[:flair_group_id] && attributes[:flair_group_id] != user.flair_group_id &&
         (
           attributes[:flair_group_id].blank? ||
             guardian.can_use_flair_group?(user, attributes[:flair_group_id])
         )
      user.flair_group_id = attributes[:flair_group_id]
    end
    # Category/tag tracking preferences (guarded, e.g. for staged users).
    if @guardian.can_change_tracking_preferences?(user)
      CATEGORY_IDS.each do |attribute, level|
        if ids = attributes[attribute]
          CategoryUser.batch_set(user, level, ids)
        end
      end
      TAG_NAMES.each do |attribute, level|
        if attributes.has_key?(attribute)
          TagUser.batch_set(user, level, attributes[attribute]&.split(",") || [])
        end
      end
    end
    save_options = false
    # special handling for theme_id cause we need to bump a sequence number
    if attributes.key?(:theme_ids)
      attributes[:theme_ids].reject!(&:blank?)
      attributes[:theme_ids].map!(&:to_i)
      if @user_guardian.allow_themes?(attributes[:theme_ids])
        user.user_option.theme_key_seq += 1 if user.user_option.theme_ids != attributes[:theme_ids]
      else
        attributes.delete(:theme_ids)
      end
    end
    if attributes.key?(:text_size)
      user.user_option.text_size_seq += 1 if user.user_option.text_size.to_s !=
        attributes[:text_size]
    end
    OPTION_ATTR.each do |attribute|
      if attributes.key?(attribute)
        save_options = true
        # Boolean columns are coerced from string params ("true"/"false").
        if [true, false].include?(user.user_option.public_send(attribute))
          val = attributes[attribute].to_s == "true"
          user.user_option.public_send("#{attribute}=", val)
        else
          user.user_option.public_send("#{attribute}=", attributes[attribute])
        end
      end
    end
    # Skipping new-user tips marks all popups as seen (sentinel -1).
    if attributes.key?(:skip_new_user_tips) && user.user_option.skip_new_user_tips
      user.user_option.seen_popups = [-1]
    end
    # automatically disable digests when mailing_list_mode is enabled
    user.user_option.email_digests = false if user.user_option.mailing_list_mode
    fields = attributes[:custom_fields]
    user.custom_fields = user.custom_fields.merge(fields) if fields.present?
    saved = nil
    User.transaction do
      update_muted_users(attributes[:muted_usernames]) if attributes.key?(:muted_usernames)
      if attributes.key?(:allowed_pm_usernames)
        update_allowed_pm_users(attributes[:allowed_pm_usernames])
      end
      if attributes.key?(:discourse_connect)
        update_discourse_connect(attributes[:discourse_connect])
      end
      if attributes.key?(:user_associated_accounts)
        updated_associated_accounts(attributes[:user_associated_accounts])
      end
      # Sidebar links are filtered through the target user's own guardian
      # so they can't link categories/tags they cannot see.
      if attributes.key?(:sidebar_category_ids)
        SidebarSectionLinksUpdater.update_category_section_links(
          user,
          category_ids:
            Category
              .secured(@user_guardian)
              .where(id: attributes[:sidebar_category_ids])
              .pluck(:id),
        )
      end
      if attributes.key?(:sidebar_tag_names) && SiteSetting.tagging_enabled
        SidebarSectionLinksUpdater.update_tag_section_links(
          user,
          tag_ids:
            DiscourseTagging
              .filter_visible(Tag, @user_guardian)
              .where(name: attributes[:sidebar_tag_names])
              .pluck(:id),
        )
      end
      if SiteSetting.enable_user_status?
        update_user_status(attributes[:status]) if attributes.has_key?(:status)
      end
      name_changed = user.name_changed?
      saved =
        (!save_options || user.user_option.save) &&
          (user_notification_schedule.nil? || user_notification_schedule.save) &&
          user_profile.save && user.save
      # NOTE(review): name_changed implies :name was in attributes, so the
      # bare fetch should not raise here — confirm if this path changes.
      if saved && (name_changed && old_user_name.casecmp(attributes.fetch(:name)) != 0)
        StaffActionLogger.new(@actor).log_name_change(
          user.id,
          old_user_name,
          attributes.fetch(:name) { "" },
        )
      end
      DiscourseEvent.trigger(:within_user_updater_transaction, user, attributes)
    rescue Addressable::URI::InvalidURIError => e
      # Prevent 500 for crazy url input
      return saved
    end
    # Post-commit side effects: schedule timings, user-tips push, events.
    if saved
      if user_notification_schedule
        if user_notification_schedule.enabled
          user_notification_schedule.create_do_not_disturb_timings(delete_existing: true)
        else
          user_notification_schedule.destroy_scheduled_timings
        end
      end
      if attributes.key?(:seen_popups) || attributes.key?(:skip_new_user_tips)
        MessageBus.publish(
          "/user-tips/#{user.id}",
          user.user_option.seen_popups,
          user_ids: [user.id],
        )
      end
      DiscourseEvent.trigger(:user_updated, user)
    end
    saved
  end

  # Replaces the user's muted-user list with the given comma-separated
  # usernames (the user themselves is excluded).
  def update_muted_users(usernames)
    usernames ||= ""
    desired_usernames = usernames.split(",").reject { |username| user.username == username }
    desired_ids = User.where(username: desired_usernames).pluck(:id)
    if desired_ids.empty?
      MutedUser.where(user_id: user.id).destroy_all
    else
      MutedUser.where("user_id = ? AND muted_user_id not in (?)", user.id, desired_ids).destroy_all

      # SQL is easier here than figuring out how to do the same in AR
      DB.exec(<<~SQL, now: Time.now, user_id: user.id, desired_ids: desired_ids)
        INSERT into muted_users(user_id, muted_user_id, created_at, updated_at)
        SELECT :user_id, id, :now, :now
        FROM users
        WHERE id in (:desired_ids)
        ON CONFLICT DO NOTHING
      SQL
    end
  end

  # Replaces the user's allowed-PM-user list, same shape as
  # update_muted_users.
  def update_allowed_pm_users(usernames)
    usernames ||= ""
    desired_usernames = usernames.split(",").reject { |username| user.username == username }
    desired_ids = User.where(username: desired_usernames).pluck(:id)

    if desired_ids.empty?
      AllowedPmUser.where(user_id: user.id).destroy_all
    else
      AllowedPmUser.where(
        "user_id = ? AND allowed_pm_user_id not in (?)",
        user.id,
        desired_ids,
      ).destroy_all

      # SQL is easier here than figuring out how to do the same in AR
      DB.exec(<<~SQL, now: Time.zone.now, user_id: user.id, desired_ids: desired_ids)
        INSERT into allowed_pm_users(user_id, allowed_pm_user_id, created_at, updated_at)
        SELECT :user_id, id, :now, :now
        FROM users
        WHERE id in (:desired_ids)
        ON CONFLICT DO NOTHING
      SQL
    end
  end

  # Upserts (or destroys, when provider_uid is blank) the user's external
  # account associations.
  def updated_associated_accounts(associations)
    associations.each do |association|
      user_associated_account =
        UserAssociatedAccount.find_or_initialize_by(
          user_id: user.id,
          provider_name: association[:provider_name],
        )
      if association[:provider_uid].present?
        user_associated_account.update!(provider_uid: association[:provider_uid])
      else
        user_associated_account.destroy!
      end
    end
  end

  private

  # Sets or clears the user's status; a blank status clears it.
  def update_user_status(status)
    if status.blank?
      @user.clear_status!
    else
      @user.set_status!(status[:description], status[:emoji], status[:ends_at])
    end
  end

  # Creates/updates the DiscourseConnect SSO record, or destroys it when
  # the external id is blank.
  def update_discourse_connect(discourse_connect)
    external_id = discourse_connect[:external_id]
    sso = SingleSignOnRecord.find_or_initialize_by(user_id: user.id)

    if external_id.present?
      sso.update!(
        external_id: discourse_connect[:external_id],
        last_payload: "external_id=#{discourse_connect[:external_id]}",
      )
    else
      sso.destroy!
    end
  end

  attr_reader :user, :guardian

  # Prefixes "http://" when the website param has no scheme; nil for blank.
  def format_url(website)
    return nil if website.blank?
    website =~ /\Ahttp/ ? website : "http://#{website}"
  end
end
``` | # frozen_string_literal: true
RSpec.describe UserUpdater do
fab!(:user)
fab!(:u1) { Fabricate(:user) }
fab!(:u2) { Fabricate(:user) }
fab!(:u3) { Fabricate(:user) }
let(:acting_user) { Fabricate.build(:user) }
describe "#update_muted_users" do
it "has no cross talk" do
updater = UserUpdater.new(u1, u1)
updater.update_muted_users("#{u2.username},#{u3.username}")
updater = UserUpdater.new(u2, u2)
updater.update_muted_users("#{u3.username},#{u1.username}")
updater = UserUpdater.new(u3, u3)
updater.update_muted_users("")
expect(MutedUser.where(user_id: u2.id).pluck(:muted_user_id)).to match_array([u3.id, u1.id])
expect(MutedUser.where(user_id: u1.id).pluck(:muted_user_id)).to match_array([u2.id, u3.id])
expect(MutedUser.where(user_id: u3.id).count).to eq(0)
end
it "excludes acting user" do
updater = UserUpdater.new(u1, u1)
updater.update_muted_users("#{u1.username},#{u2.username}")
expect(MutedUser.where(muted_user_id: u2.id).pluck(:muted_user_id)).to match_array([u2.id])
end
end
describe "#update" do
fab!(:category)
fab!(:tag)
fab!(:tag2) { Fabricate(:tag) }
it "saves user" do
user = Fabricate(:user, name: "Billy Bob")
updater = UserUpdater.new(user, user)
updater.update(name: "Jim Tom")
expect(user.reload.name).to eq "Jim Tom"
end
describe "the within_user_updater_transaction event" do
it "allows plugins to perform additional updates" do
update_attributes = { name: "Jimmmy Johnny" }
handler =
Proc.new do |user, attrs|
user.user_profile.update!(bio_raw: "hello world I'm Jimmmy")
expect(attrs).to eq(update_attributes)
end
DiscourseEvent.on(:within_user_updater_transaction, &handler)
updater = UserUpdater.new(user, user)
updater.update(update_attributes)
expect(user.reload.name).to eq("Jimmmy Johnny")
expect(user.user_profile.bio_raw).to eq("hello world I'm Jimmmy")
ensure
DiscourseEvent.off(:within_user_updater_transaction, &handler)
end
it "can cancel the whole update transaction if a handler raises" do
error_class = Class.new(StandardError)
handler = Proc.new { raise error_class.new }
DiscourseEvent.on(:within_user_updater_transaction, &handler)
old_name = user.name
updater = UserUpdater.new(user, user)
expect { updater.update(name: "Failure McClario") }.to raise_error(error_class)
expect(user.reload.name).to eq(old_name)
ensure
DiscourseEvent.off(:within_user_updater_transaction, &handler)
end
end
it "can update categories and tags" do
updater = UserUpdater.new(user, user)
updater.update(watched_tags: "#{tag.name},#{tag2.name}", muted_category_ids: [category.id])
expect(
TagUser.where(
user_id: user.id,
tag_id: tag.id,
notification_level: TagUser.notification_levels[:watching],
).exists?,
).to eq(true)
expect(
TagUser.where(
user_id: user.id,
tag_id: tag2.id,
notification_level: TagUser.notification_levels[:watching],
).exists?,
).to eq(true)
expect(
CategoryUser.where(
user_id: user.id,
category_id: category.id,
notification_level: CategoryUser.notification_levels[:muted],
).count,
).to eq(1)
end
context "with a staged user" do
let(:staged_user) { Fabricate(:staged) }
context "when allow_changing_staged_user_tracking is false" do
before { SiteSetting.allow_changing_staged_user_tracking = false }
it "doesn't update muted categories and watched tags" do
updater = UserUpdater.new(Fabricate(:admin), staged_user)
updater.update(watched_tags: "#{tag.name}", muted_category_ids: [category.id])
expect(TagUser.exists?(user_id: staged_user.id)).to eq(false)
expect(CategoryUser.exists?(user_id: staged_user.id)).to eq(false)
end
end
context "when allow_changing_staged_user_tracking is true" do
before { SiteSetting.allow_changing_staged_user_tracking = true }
it "updates muted categories and watched tags" do
updater = UserUpdater.new(Fabricate(:admin), staged_user)
updater.update(watched_tags: "#{tag.name}", muted_category_ids: [category.id])
expect(
TagUser.exists?(
user_id: staged_user.id,
tag_id: tag.id,
notification_level: TagUser.notification_levels[:watching],
),
).to eq(true)
expect(
CategoryUser.exists?(
user_id: staged_user.id,
category_id: category.id,
notification_level: CategoryUser.notification_levels[:muted],
),
).to eq(true)
end
end
end
it "doesn't remove notification prefs when updating something else" do
TagUser.create!(
user: user,
tag: tag,
notification_level: TagUser.notification_levels[:watching],
)
CategoryUser.create!(
user: user,
category: category,
notification_level: CategoryUser.notification_levels[:muted],
)
updater = UserUpdater.new(acting_user, user)
updater.update(name: "Steve Dave")
expect(TagUser.where(user: user).count).to eq(1)
expect(CategoryUser.where(user: user).count).to eq(1)
end
it "updates various fields" do
updater = UserUpdater.new(acting_user, user)
date_of_birth = Time.zone.now
SiteSetting.disable_mailing_list_mode = false
theme = Fabricate(:theme, user_selectable: true)
seq = user.user_option.theme_key_seq
val =
updater.update(
bio_raw: "my new bio",
email_level: UserOption.email_level_types[:always],
mailing_list_mode: true,
digest_after_minutes: "45",
new_topic_duration_minutes: 100,
auto_track_topics_after_msecs: 101,
notification_level_when_replying: 3,
email_in_reply_to: false,
date_of_birth: date_of_birth,
theme_ids: [theme.id],
allow_private_messages: false,
)
expect(val).to be_truthy
user.reload
expect(user.user_profile.bio_raw).to eq "my new bio"
expect(user.user_option.email_level).to eq UserOption.email_level_types[:always]
expect(user.user_option.mailing_list_mode).to eq true
expect(user.user_option.digest_after_minutes).to eq 45
expect(user.user_option.new_topic_duration_minutes).to eq 100
expect(user.user_option.auto_track_topics_after_msecs).to eq 101
expect(user.user_option.notification_level_when_replying).to eq 3
expect(user.user_option.email_in_reply_to).to eq false
expect(user.user_option.theme_ids.first).to eq theme.id
expect(user.user_option.theme_key_seq).to eq(seq + 1)
expect(user.user_option.allow_private_messages).to eq(false)
expect(user.date_of_birth).to eq(date_of_birth.to_date)
end
it "allows user to update profile header when the user has required trust level" do
user = Fabricate(:user, trust_level: 2)
updater = UserUpdater.new(user, user)
upload = Fabricate(:upload)
SiteSetting.min_trust_level_to_allow_profile_background = 2
val = updater.update(profile_background_upload_url: upload.url)
expect(val).to be_truthy
user.reload
expect(user.profile_background_upload).to eq(upload)
success = updater.update(profile_background_upload_url: "")
expect(success).to eq(true)
user.reload
expect(user.profile_background_upload).to eq(nil)
end
it "allows user to update user card background when the user has required trust level" do
user = Fabricate(:user, trust_level: 2)
updater = UserUpdater.new(user, user)
upload = Fabricate(:upload)
SiteSetting.min_trust_level_to_allow_user_card_background = 2
val = updater.update(card_background_upload_url: upload.url)
expect(val).to be_truthy
user.reload
expect(user.card_background_upload).to eq(upload)
success = updater.update(card_background_upload_url: "")
expect(success).to eq(true)
user.reload
expect(user.card_background_upload).to eq(nil)
end
it "disables email_digests when enabling mailing_list_mode" do
updater = UserUpdater.new(acting_user, user)
SiteSetting.disable_mailing_list_mode = false
val = updater.update(mailing_list_mode: true, email_digests: true)
expect(val).to be_truthy
user.reload
expect(user.user_option.email_digests).to eq false
expect(user.user_option.mailing_list_mode).to eq true
end
it "filters theme_ids blank values before updating preferences" do
user.user_option.update!(theme_ids: [1])
updater = UserUpdater.new(acting_user, user)
updater.update(theme_ids: [""])
user.reload
expect(user.user_option.theme_ids).to eq([])
updater.update(theme_ids: [nil])
user.reload
expect(user.user_option.theme_ids).to eq([])
theme = Fabricate(:theme)
child = Fabricate(:theme, component: true)
theme.add_relative_theme!(:child, child)
theme.set_default!
updater.update(theme_ids: [theme.id.to_s, child.id.to_s, "", nil])
user.reload
expect(user.user_option.theme_ids).to eq([theme.id, child.id])
end
let(:schedule_attrs) do
{
enabled: true,
day_0_start_time: 30,
day_0_end_time: 60,
day_1_start_time: 30,
day_1_end_time: 60,
day_2_start_time: 30,
day_2_end_time: 60,
day_3_start_time: 30,
day_3_end_time: 60,
day_4_start_time: 30,
day_4_end_time: 60,
day_5_start_time: 30,
day_5_end_time: 60,
day_6_start_time: 30,
day_6_end_time: 60,
}
end
context "with user_notification_schedule" do
it "allows users to create their notification schedule when it doesn't exist previously" do
expect(user.user_notification_schedule).to be_nil
updater = UserUpdater.new(acting_user, user)
updater.update(user_notification_schedule: schedule_attrs)
user.reload
expect(user.user_notification_schedule.enabled).to eq(true)
expect(user.user_notification_schedule.day_0_start_time).to eq(30)
expect(user.user_notification_schedule.day_0_end_time).to eq(60)
expect(user.user_notification_schedule.day_6_start_time).to eq(30)
expect(user.user_notification_schedule.day_6_end_time).to eq(60)
end
it "allows users to update their notification schedule" do
UserNotificationSchedule.create({ user: user }.merge(UserNotificationSchedule::DEFAULT))
updater = UserUpdater.new(acting_user, user)
updater.update(user_notification_schedule: schedule_attrs)
user.reload
expect(user.user_notification_schedule.enabled).to eq(true)
expect(user.user_notification_schedule.day_0_start_time).to eq(30)
expect(user.user_notification_schedule.day_0_end_time).to eq(60)
expect(user.user_notification_schedule.day_6_start_time).to eq(30)
expect(user.user_notification_schedule.day_6_end_time).to eq(60)
end
it "processes the schedule and do_not_disturb_timings are created" do
updater = UserUpdater.new(acting_user, user)
expect { updater.update(user_notification_schedule: schedule_attrs) }.to change {
user.do_not_disturb_timings.count
}.by(4)
end
it "removes do_not_disturb_timings when the schedule is disabled" do
updater = UserUpdater.new(acting_user, user)
updater.update(user_notification_schedule: schedule_attrs)
expect(user.user_notification_schedule.enabled).to eq(true)
schedule_attrs[:enabled] = false
updater.update(user_notification_schedule: schedule_attrs)
expect(user.user_notification_schedule.enabled).to eq(false)
expect(user.do_not_disturb_timings.count).to eq(0)
end
end
context "when sso overrides bio" do
it "does not change bio" do
SiteSetting.discourse_connect_url = "https://www.example.com/sso"
SiteSetting.enable_discourse_connect = true
SiteSetting.discourse_connect_overrides_bio = true
updater = UserUpdater.new(acting_user, user)
expect(updater.update(bio_raw: "new bio")).to be_truthy
user.reload
expect(user.user_profile.bio_raw).not_to eq "new bio"
end
end
context "when sso overrides location" do
it "does not change location" do
SiteSetting.discourse_connect_url = "https://www.example.com/sso"
SiteSetting.enable_discourse_connect = true
SiteSetting.discourse_connect_overrides_location = true
updater = UserUpdater.new(acting_user, user)
expect(updater.update(location: "new location")).to be_truthy
user.reload
expect(user.user_profile.location).not_to eq "new location"
end
end
context "when sso overrides website" do
it "does not change website" do
SiteSetting.discourse_connect_url = "https://www.example.com/sso"
SiteSetting.enable_discourse_connect = true
SiteSetting.discourse_connect_overrides_website = true
updater = UserUpdater.new(acting_user, user)
expect(updater.update(website: "https://google.com")).to be_truthy
user.reload
expect(user.user_profile.website).not_to eq "https://google.com"
end
end
context "when updating primary group" do
let(:new_group) { Group.create(name: "new_group") }
it "updates when setting is enabled" do
SiteSetting.user_selected_primary_groups = true
user.groups << new_group
user.update(primary_group_id: nil)
UserUpdater.new(acting_user, user).update(primary_group_id: new_group.id)
user.reload
expect(user.primary_group_id).to eq new_group.id
end
it "does not update when setting is disabled" do
SiteSetting.user_selected_primary_groups = false
user.groups << new_group
user.update(primary_group_id: nil)
UserUpdater.new(acting_user, user).update(primary_group_id: new_group.id)
user.reload
expect(user.primary_group_id).to eq nil
end
it "does not update when changing other profile data" do
SiteSetting.user_selected_primary_groups = true
user.groups << new_group
user.update(primary_group_id: new_group.id)
UserUpdater.new(acting_user, user).update(website: "http://example.com")
user.reload
expect(user.primary_group_id).to eq new_group.id
end
it "can be removed by the user when setting is enabled" do
SiteSetting.user_selected_primary_groups = true
user.groups << new_group
user.update(primary_group_id: new_group.id)
UserUpdater.new(acting_user, user).update(primary_group_id: "")
user.reload
expect(user.primary_group_id).to eq nil
end
it "cannot be removed by the user when setting is disabled" do
SiteSetting.user_selected_primary_groups = false
user.groups << new_group
user.update(primary_group_id: new_group.id)
UserUpdater.new(acting_user, user).update(primary_group_id: "")
user.reload
expect(user.primary_group_id).to eq new_group.id
end
end
context "when updating flair group" do
let(:group) do
Fabricate(
:group,
name: "Group",
flair_bg_color: "#111111",
flair_color: "#999999",
flair_icon: "icon",
)
end
it "updates when setting is enabled" do
group.add(user)
UserUpdater.new(acting_user, user).update(flair_group_id: group.id)
expect(user.reload.flair_group_id).to eq(group.id)
UserUpdater.new(acting_user, user).update(flair_group_id: "")
expect(user.reload.flair_group_id).to eq(nil)
end
end
context "when update fails" do
it "returns false" do
user.stubs(save: false)
updater = UserUpdater.new(acting_user, user)
expect(updater.update).to be_falsey
end
end
context "with permission to update title" do
it "allows user to change title" do
user = Fabricate(:user, title: "Emperor")
Guardian.any_instance.stubs(:can_grant_title?).with(user, "Minion").returns(true)
updater = UserUpdater.new(acting_user, user)
updater.update(title: "Minion")
expect(user.reload.title).to eq "Minion"
end
end
context "when title is from a badge" do
fab!(:user) { Fabricate(:user, title: "Emperor") }
fab!(:badge) { Fabricate(:badge, name: "Minion") }
context "when badge can be used as a title" do
before { badge.update(allow_title: true) }
it "can use as title, sets granted_title_badge_id" do
BadgeGranter.grant(badge, user)
updater = UserUpdater.new(user, user)
updater.update(title: badge.name)
user.reload
expect(user.user_profile.granted_title_badge_id).to eq(badge.id)
end
it "badge has not been granted, does not change title" do
badge.update(allow_title: true)
updater = UserUpdater.new(user, user)
updater.update(title: badge.name)
user.reload
expect(user.title).not_to eq(badge.name)
expect(user.user_profile.granted_title_badge_id).to be_nil
end
it "changing to a title that is not from a badge, unsets granted_title_badge_id" do
user.update(title: badge.name)
user.user_profile.update(granted_title_badge_id: badge.id)
Guardian.any_instance.stubs(:can_grant_title?).with(user, "Dancer").returns(true)
updater = UserUpdater.new(user, user)
updater.update(title: "Dancer")
user.reload
expect(user.title).to eq("Dancer")
expect(user.user_profile.granted_title_badge_id).to be_nil
end
end
it "cannot use as title, does not change title" do
BadgeGranter.grant(badge, user)
updater = UserUpdater.new(user, user)
updater.update(title: badge.name)
user.reload
expect(user.title).not_to eq(badge.name)
expect(user.user_profile.granted_title_badge_id).to be_nil
end
end
context "without permission to update title" do
it "does not allow user to change title" do
user = Fabricate(:user, title: "Emperor")
Guardian.any_instance.stubs(:can_grant_title?).with(user, "Minion").returns(false)
updater = UserUpdater.new(acting_user, user)
updater.update(title: "Minion")
expect(user.reload.title).not_to eq "Minion"
end
end
context "when website includes http" do
it "does not add http before updating" do
updater = UserUpdater.new(acting_user, user)
updater.update(website: "http://example.com")
expect(user.reload.user_profile.website).to eq "http://example.com"
end
end
context "when website does not include http" do
it "adds http before updating" do
updater = UserUpdater.new(acting_user, user)
updater.update(website: "example.com")
expect(user.reload.user_profile.website).to eq "http://example.com"
end
end
context "when website is invalid" do
it "returns an error" do
updater = UserUpdater.new(acting_user, user)
expect(updater.update(website: "ʔ<")).to eq nil
end
end
context "when custom_fields is empty string" do
it "update is successful" do
user.custom_fields = { "import_username" => "my_old_username" }
user.save
updater = UserUpdater.new(acting_user, user)
updater.update(website: "example.com", custom_fields: "")
expect(user.reload.custom_fields).to eq("import_username" => "my_old_username")
end
end
context "when skip_new_user_tips is edited" do
it "updates seen_popups too" do
messages =
MessageBus.track_publish("/user-tips/#{user.id}") do
UserUpdater.new(Discourse.system_user, user).update(skip_new_user_tips: true)
end
expect(user.user_option.skip_new_user_tips).to eq(true)
expect(user.user_option.seen_popups).to eq([-1])
expect(messages.map(&:data)).to contain_exactly([-1])
end
it "does not reset seen_popups" do
user.user_option.update!(seen_popups: [1, 2, 3])
UserUpdater.new(Discourse.system_user, user).update(skip_new_user_tips: false)
expect(user.user_option.skip_new_user_tips).to eq(false)
expect(user.user_option.seen_popups).to eq([1, 2, 3])
end
end
context "when seen_popups is edited" do
it "publishes a message" do
messages =
MessageBus.track_publish("/user-tips/#{user.id}") do
UserUpdater.new(Discourse.system_user, user).update(seen_popups: [1])
end
expect(user.user_option.seen_popups).to eq([1])
expect(messages.map(&:data)).to contain_exactly([1])
end
end
it "logs the action" do
user = Fabricate(:user, name: "Billy Bob")
expect do UserUpdater.new(user, user).update(name: "Jim Tom") end.to change {
UserHistory.count
}.by(1)
expect(UserHistory.last.action).to eq(UserHistory.actions[:change_name])
expect do UserUpdater.new(user, user).update(name: "JiM TOm") end.to_not change {
UserHistory.count
}
expect do UserUpdater.new(user, user).update(bio_raw: "foo bar") end.to_not change {
UserHistory.count
}
user_without_name = Fabricate(:user, name: nil)
expect do
UserUpdater.new(user_without_name, user_without_name).update(bio_raw: "foo bar")
end.to_not change { UserHistory.count }
expect do
UserUpdater.new(user_without_name, user_without_name).update(name: "Jim Tom")
end.to change { UserHistory.count }.by(1)
expect(UserHistory.last.action).to eq(UserHistory.actions[:change_name])
expect do UserUpdater.new(user, user).update(name: "") end.to change { UserHistory.count }.by(
1,
)
expect(UserHistory.last.action).to eq(UserHistory.actions[:change_name])
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Maintains a per-category Redis list of random topic ids used to serve
# random/suggested topics cheaply: next() pops ids from the cached list and
# backfill() repopulates it from a TopicQuery.
class RandomTopicSelector
# maximum number of ids cached per backfill run
BACKFILL_SIZE = 3000
# when fewer than this many ids remain, a deferred backfill is scheduled
BACKFILL_LOW_WATER_MARK = 500
# Rebuild the cached id list for +category+ (or the site-wide list when nil).
# Returns the array of topic ids that were pushed (may be empty).
def self.backfill(category = nil)
exclude = category&.topic_id
options = {
per_page: category ? category.num_featured_topics : 3,
visible: true,
no_definitions: true,
}
# never suggest the category's own definition ("About") topic
options[:except_topic_ids] = [category.topic_id] if exclude
if category
options[:category] = category.id
# NOTE: at the moment this site setting scopes tightly to a category (excluding subcats)
# this is done so we don't populate a junk cache
options[:no_subcategories] = true if SiteSetting.limit_suggested_to_category
# don't leak private categories into the "everything" group
options[:guardian] = Guardian.new(Discourse.system_user)
end
query = TopicQuery.new(nil, options)
# NOTE(review): .order("RANDOM()") appears superseded by the later
# .reorder("RANDOM()") and looks redundant — confirm before removing.
results =
query
.latest_results
.order("RANDOM()")
.where(closed: false, archived: false)
.where("topics.created_at > ?", SiteSetting.suggested_topics_max_days_old.days.ago)
.limit(BACKFILL_SIZE)
.reorder("RANDOM()")
.pluck(:id)
key = cache_key(category)
if results.present?
# push the ids and refresh the TTL atomically in one round trip
Discourse.redis.multi do |transaction|
transaction.rpush(key, results)
transaction.expire(key, 2.days)
end
end
results
end
# Pop up to +count+ random topic ids for +category+ from the cache,
# backfilling synchronously when the cache cannot satisfy the request and
# scheduling a deferred refill when the cache is running low.
def self.next(count, category = nil)
key = cache_key(category)
results = []
return results if count < 1
# read and trim atomically so concurrent callers don't hand out duplicates
results =
Discourse.redis.multi do |transaction|
transaction.lrange(key, 0, count - 1)
transaction.ltrim(key, count, -1)
end
if !results.is_a?(Array) # Redis is in readonly mode
results = Discourse.redis.lrange(key, 0, count - 1)
else
# MULTI returns [lrange result, ltrim result]; keep the ids
results = results[0]
end
results.map!(&:to_i)
left = count - results.length
backfilled = false
if left > 0
ids = backfill(category)
backfilled = true
results += ids[0...count]
results.uniq!
results = results[0...count]
end
if !backfilled && Discourse.redis.llen(key) < BACKFILL_LOW_WATER_MARK
Scheduler::Defer.later("backfill") { backfill(category) }
end
results
end
# Redis key of the cached list; a nil category yields the site-wide key.
def self.cache_key(category = nil)
"random_topic_cache_#{category&.id}"
end
end
``` | # frozen_string_literal: true
RSpec.describe RandomTopicSelector do
it "can correctly use cache" do
key = RandomTopicSelector.cache_key
Discourse.redis.del key
# seed the cache with ids 0..3
4.times { |t| Discourse.redis.rpush key, t }
expect(RandomTopicSelector.next(0)).to eq([])
expect(RandomTopicSelector.next(2)).to eq([0, 1])
# simulate Redis readonly mode: MULTI fails, so the list is read without trimming
Discourse.redis.expects(:multi).returns(Discourse.received_redis_readonly!)
expect(RandomTopicSelector.next(2)).to eq([2, 3])
Discourse.redis.unstub(:multi)
# the readonly read did not consume the ids, so they are handed out again
expect(RandomTopicSelector.next(2)).to eq([2, 3])
expect(RandomTopicSelector.next(2)).to eq([])
end
it "can correctly backfill" do
category = Fabricate(:category, sort_order: "op_likes")
t1 = Fabricate(:topic, category_id: category.id)
# invisible and deleted topics must be excluded from the backfill
_t2 = Fabricate(:topic, category_id: category.id, visible: false)
_t3 = Fabricate(:topic, category_id: category.id, deleted_at: 1.minute.ago)
t4 = Fabricate(:topic, category_id: category.id)
expect(RandomTopicSelector.next(5, category).sort).to eq([t1.id, t4.id].sort)
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class BadgeGranter
class GrantError < StandardError
end
# Disable deferred badge granting via the Redis queue (used by tests).
def self.disable_queue
@queue_disabled = true
end
# Re-enable deferred badge granting.
def self.enable_queue
@queue_disabled = false
end
# @param badge [Badge] badge to grant
# @param user [User] recipient
# @param opts [Hash] :granted_by (defaults to the system user), :post_id, :created_at
def initialize(badge, user, opts = {})
@badge, @user, @opts = badge, user, opts
@granted_by = opts[:granted_by] || Discourse.system_user
@post_id = opts[:post_id]
end
# Convenience wrapper: grant +badge+ to +user+, returning the UserBadge or nil.
def self.grant(badge, user, opts = {})
BadgeGranter.new(badge, user, opts).grant
end
# Enqueue mass_award_badge jobs for users matched by email or username
# (lookups are case-insensitive). With ensure_users_have_badge_once, users who
# already own the badge are skipped and each matched user gets exactly one
# grant. Returns a hash with :unmatched_entries, :matched_users_count and
# :unmatched_entries_count.
def self.enqueue_mass_grant_for_users(
badge,
emails: [],
usernames: [],
ensure_users_have_badge_once: true
)
emails = emails.map(&:downcase)
usernames = usernames.map(&:downcase)
usernames_map_to_ids = {}
emails_map_to_ids = {}
if usernames.size > 0
usernames_map_to_ids = User.where(username_lower: usernames).pluck(:username_lower, :id).to_h
end
if emails.size > 0
emails_map_to_ids = User.with_email(emails).pluck("LOWER(user_emails.email)", :id).to_h
end
count_per_user = {}
unmatched = Set.new
(usernames + emails).each do |entry|
id = usernames_map_to_ids[entry] || emails_map_to_ids[entry]
if id.blank?
unmatched << entry
next
end
# without the "once" guarantee, duplicate entries award multiple grants
if ensure_users_have_badge_once
count_per_user[id] = 1
else
count_per_user[id] ||= 0
count_per_user[id] += 1
end
end
existing_owners_ids = []
if ensure_users_have_badge_once
existing_owners_ids = UserBadge.where(badge: badge).distinct.pluck(:user_id)
end
count_per_user.each do |user_id, count|
next if ensure_users_have_badge_once && existing_owners_ids.include?(user_id)
Jobs.enqueue(:mass_award_badge, user: user_id, badge: badge.id, count: count)
end
{
unmatched_entries: unmatched.to_a,
matched_users_count: count_per_user.size,
unmatched_entries_count: unmatched.size,
}
end
# Grant +badge+ to +user+ +count+ times inside one transaction, then send a
# single notification and attach it to the new grants.
# @raise [ArgumentError] when count < 1
def self.mass_grant(badge, user, count:)
return if !badge.enabled?
raise ArgumentError.new("count can't be less than 1") if count < 1
UserBadge.transaction do
# the INSERT heredoc is repeated +count+ times (string * count) so a single
# exec inserts all rows, each computing the next seq value
DB.exec(
<<~SQL * count,
INSERT INTO user_badges
(granted_at, created_at, granted_by_id, user_id, badge_id, seq)
VALUES
(
:now,
:now,
:system,
:user_id,
:badge_id,
COALESCE((
SELECT MAX(seq) + 1
FROM user_badges
WHERE badge_id = :badge_id AND user_id = :user_id
), 0)
);
SQL
now: Time.zone.now,
system: Discourse.system_user.id,
user_id: user.id,
badge_id: badge.id,
)
notification = send_notification(user.id, user.username, user.locale, badge)
# point all freshly inserted (notification-less) grants at the notification
DB.exec(<<~SQL, notification_id: notification.id, user_id: user.id, badge_id: badge.id)
UPDATE user_badges
SET notification_id = :notification_id
WHERE notification_id IS NULL AND user_id = :user_id AND badge_id = :badge_id
SQL
UserBadge.update_featured_ranks!(user.id)
end
end
# Grant the badge to the user, creating a UserBadge row and (unless
# suppressed) a granted_badge notification. Returns the UserBadge, or nil
# when the grant is not permitted, the badge is missing/disabled, or the
# user is blank.
def grant
return if @granted_by && !Guardian.new(@granted_by).can_grant_badges?(@user)
return unless @badge.present? && @badge.enabled?
return if @user.blank?
find_by = { badge_id: @badge.id, user_id: @user.id }
# multiple-grant badges are deduplicated per post, not per user
find_by[:post_id] = @post_id if @badge.multiple_grant?
user_badge = UserBadge.find_by(find_by)
if user_badge.nil? || (@badge.multiple_grant? && @post_id.nil?)
UserBadge.transaction do
seq = 0
if @badge.multiple_grant?
seq = UserBadge.where(badge: @badge, user: @user).maximum(:seq)
seq = (seq || -1) + 1
end
user_badge =
UserBadge.create!(
badge: @badge,
user: @user,
granted_by: @granted_by,
granted_at: @opts[:created_at] || Time.now,
post_id: @post_id,
seq: seq,
)
# NOTE(review): returns nil (while keeping the created row) when badges
# are disabled — confirm this early exit is intended
return unless SiteSetting.enable_badges
if @granted_by != Discourse.system_user
StaffActionLogger.new(@granted_by).log_badge_grant(user_badge)
end
skip_new_user_tips = @user.user_option.skip_new_user_tips
unless self.class.suppress_notification?(@badge, user_badge.granted_at, skip_new_user_tips)
notification =
self.class.send_notification(@user.id, @user.username, @user.effective_locale, @badge)
user_badge.update!(notification_id: notification.id)
end
end
end
user_badge
end
# Destroy a UserBadge and, when the user's title came from this badge (by
# name or by its customized translation), clear the title. Staff actions are
# logged when :revoked_by is given.
def self.revoke(user_badge, options = {})
UserBadge.transaction do
user_badge.destroy!
if options[:revoked_by]
StaffActionLogger.new(options[:revoked_by]).log_badge_revoke(user_badge)
end
# If the user's title is the same as the badge name OR the custom badge name, remove their title.
custom_badge_name =
TranslationOverride.find_by(translation_key: user_badge.badge.translation_key)&.value
user_title_is_badge_name = user_badge.user.title == user_badge.badge.name
user_title_is_custom_badge_name =
custom_badge_name.present? && user_badge.user.title == custom_badge_name
if user_title_is_badge_name || user_title_is_custom_badge_name
if options[:revoked_by]
StaffActionLogger.new(options[:revoked_by]).log_title_revoke(
user_badge.user,
revoke_reason: "user title was same as revoked badge name or custom badge name",
previous_value: user_badge.user.title,
)
end
user_badge.user.title = nil
user_badge.user.save!
end
end
end
# Remove every grant of +badge+ and clear user titles derived from its name
# or its customized (translation override) name.
def self.revoke_all(badge)
custom_badge_names =
TranslationOverride.where(translation_key: badge.translation_key).pluck(:value)
users =
User.joins(:user_badges).where(user_badges: { badge_id: badge.id }).where(title: badge.name)
users =
users.or(
User.joins(:user_badges).where(title: custom_badge_names),
) unless custom_badge_names.empty?
users.update_all(title: nil)
UserBadge.where(badge: badge).delete_all
end
# Push a badge-trigger event onto the Redis queue for later processing by
# process_queue!. No-op when badges are disabled or the queue is off.
# +type+ is a Badge::Trigger constant; +opt+ carries :post, :user or
# :post_action depending on the trigger.
def self.queue_badge_grant(type, opt)
return if !SiteSetting.enable_badges || @queue_disabled
payload = nil
case type
when Badge::Trigger::PostRevision
post = opt[:post]
payload = { type: "PostRevision", post_ids: [post.id] }
when Badge::Trigger::UserChange
user = opt[:user]
payload = { type: "UserChange", user_ids: [user.id] }
when Badge::Trigger::TrustLevelChange
user = opt[:user]
payload = { type: "TrustLevelChange", user_ids: [user.id] }
when Badge::Trigger::PostAction
action = opt[:post_action]
# BUGFIX: use compact, not compact! — compact! returns nil when no nils
# were removed, which set post_ids to nil whenever related_post_id was present
payload = { type: "PostAction", post_ids: [action.post_id, action.related_post_id].compact }
end
Discourse.redis.lpush queue_key, payload.to_json if payload
end
# Drop all pending queued trigger events.
def self.clear_queue!
Discourse.redis.del queue_key
end
# Drain up to 1000 queued trigger events and backfill every badge whose
# trigger matches, batching the post/user ids per event type.
def self.process_queue!
limit = 1000
items = []
while limit > 0 && item = Discourse.redis.lpop(queue_key)
items << JSON.parse(item)
limit -= 1
end
items = items.group_by { |i| i["type"] }
items.each do |type, list|
post_ids = list.flat_map { |i| i["post_ids"] }.compact.uniq
user_ids = list.flat_map { |i| i["user_ids"] }.compact.uniq
next unless post_ids.present? || user_ids.present?
find_by_type(type).each { |badge| backfill(badge, post_ids: post_ids, user_ids: user_ids) }
end
end
# Badges whose trigger matches a queued event type name (e.g. "PostRevision").
def self.find_by_type(type)
Badge.where(trigger: "Badge::Trigger::#{type}".constantize)
end
# Redis list key holding queued trigger events.
def self.queue_key
"badge_queue"
end
# Options:
# :target_posts - whether the badge targets posts
# :trigger - the Badge::Trigger id
# Validate that a custom badge SQL query references the parameters its
# trigger requires and returns the required columns; raises a RuntimeError
# with a human-readable "Contract violation" message otherwise.
def self.contract_checks!(sql, opts = {})
return if sql.blank?
if Badge::Trigger.uses_post_ids?(opts[:trigger])
unless sql.match(/:post_ids/)
raise(
"Contract violation:\nQuery triggers on posts, but does not reference the ':post_ids' array",
)
end
if sql.match(/:user_ids/)
raise "Contract violation:\nQuery triggers on posts, but references the ':user_ids' array"
end
end
if Badge::Trigger.uses_user_ids?(opts[:trigger])
unless sql.match(/:user_ids/)
raise "Contract violation:\nQuery triggers on users, but does not reference the ':user_ids' array"
end
if sql.match(/:post_ids/)
raise "Contract violation:\nQuery triggers on users, but references the ':post_ids' array"
end
end
if opts[:trigger] && !Badge::Trigger.is_none?(opts[:trigger])
unless sql.match(/:backfill/)
raise "Contract violation:\nQuery is triggered, but does not reference the ':backfill' parameter.\n(Hint: if :backfill is TRUE, you should ignore the :post_ids/:user_ids)"
end
end
# TODO these three conditions have a lot of false negatives
if opts[:target_posts]
unless sql.match(/post_id/)
raise "Contract violation:\nQuery targets posts, but does not return a 'post_id' column"
end
end
unless sql.match(/user_id/)
raise "Contract violation:\nQuery does not return a 'user_id' column"
end
unless sql.match(/granted_at/)
raise "Contract violation:\nQuery does not return a 'granted_at' column"
end
if sql.match(/;\s*\z/)
raise "Contract violation:\nQuery ends with a semicolon. Remove the semicolon; your sql will be used in a subquery."
end
end
# Options:
# :target_posts - whether the badge targets posts
# :trigger - the Badge::Trigger id
# :explain - return the EXPLAIN query
# Dry-run a badge query: returns { grant_count:, sample:, query_plan: } on
# success, or { errors: message } when the query violates the contract,
# returns bad rows, or fails to execute.
def self.preview(sql, opts = {})
params = { user_ids: [], post_ids: [], backfill: true }
BadgeGranter.contract_checks!(sql, opts)
# hack to allow for params, otherwise sanitizer will trigger sprintf
count_sql = <<~SQL
SELECT COUNT(*) count
FROM (
#{sql}
) q
WHERE :backfill = :backfill
SQL
grant_count = DB.query_single(count_sql, params).first.to_i
grants_sql =
if opts[:target_posts]
<<~SQL
SELECT u.id, u.username, q.post_id, t.title, q.granted_at
FROM (
#{sql}
) q
JOIN users u on u.id = q.user_id
LEFT JOIN badge_posts p on p.id = q.post_id
LEFT JOIN topics t on t.id = p.topic_id
WHERE :backfill = :backfill
LIMIT 10
SQL
else
<<~SQL
SELECT u.id, u.username, q.granted_at
FROM (
#{sql}
) q
JOIN users u on u.id = q.user_id
WHERE :backfill = :backfill
LIMIT 10
SQL
end
query_plan = nil
# HACK: active record sanitization too flexible, force it to go down the sanitization path that cares not for % stuff
# note mini_sql uses AR sanitizer at the moment (review if changed)
query_plan = DB.query_hash("EXPLAIN #{sql} /*:backfill*/", params) if opts[:explain]
sample = DB.query(grants_sql, params)
# sanity-check the sample rows so authors get actionable error messages
sample.each do |result|
unless User.exists?(id: result.id)
raise "Query returned a non-existent user ID:\n#{result.id}"
end
unless result.granted_at
raise "Query did not return a badge grant time\n(Try using 'current_timestamp granted_at')"
end
if opts[:target_posts]
raise "Query did not return a post ID" unless result.post_id
unless Post.exists?(result.post_id).present?
raise "Query returned a non-existent post ID:\n#{result.post_id}"
end
end
end
{ grant_count: grant_count, sample: sample, query_plan: query_plan }
rescue => e
{ errors: e.message }
end
# maximum delta size; larger scoped runs fall back to a full backfill
MAX_ITEMS_FOR_DELTA ||= 200
# Run a badge's SQL query and sync user_badges: optionally revoke rows no
# longer matched (auto_revoke, full backfill only), insert missing grants,
# and notify recipients. +opts+ may scope the run to :post_ids / :user_ids.
# @raise [GrantError] wrapping any underlying failure
def self.backfill(badge, opts = nil)
return unless SiteSetting.enable_badges
return unless badge.enabled
return unless badge.query.present?
post_ids = user_ids = nil
post_ids = opts[:post_ids] if opts
user_ids = opts[:user_ids] if opts
# safeguard fall back to full backfill if more than 200
if (post_ids && post_ids.size > MAX_ITEMS_FOR_DELTA) ||
(user_ids && user_ids.size > MAX_ITEMS_FOR_DELTA)
post_ids = nil
user_ids = nil
end
post_ids = nil if post_ids.blank?
user_ids = nil if user_ids.blank?
full_backfill = !user_ids && !post_ids
post_clause = badge.target_posts ? "AND (q.post_id = ub.post_id OR NOT :multiple_grant)" : ""
post_id_field = badge.target_posts ? "q.post_id" : "NULL"
# revoke grants whose (user, post) no longer appears in the badge query
sql = <<~SQL
DELETE FROM user_badges
WHERE id IN (
SELECT ub.id
FROM user_badges ub
LEFT JOIN (
#{badge.query}
) q ON q.user_id = ub.user_id
#{post_clause}
WHERE ub.badge_id = :id AND q.user_id IS NULL
)
SQL
if badge.auto_revoke && full_backfill
DB.exec(
sql,
id: badge.id,
post_ids: [-1],
user_ids: [-2],
backfill: true,
multiple_grant: true, # cheat here, cause we only run on backfill and are deleting
)
end
# insert missing grants and return only the rows that were actually created
sql = <<~SQL
WITH w as (
INSERT INTO user_badges(badge_id, user_id, granted_at, granted_by_id, created_at, post_id)
SELECT :id, q.user_id, q.granted_at, -1, current_timestamp, #{post_id_field}
FROM (
#{badge.query}
) q
LEFT JOIN user_badges ub ON ub.badge_id = :id AND ub.user_id = q.user_id
#{post_clause}
/*where*/
ON CONFLICT DO NOTHING
RETURNING id, user_id, granted_at
)
SELECT w.*, username, locale, (u.admin OR u.moderator) AS staff, uo.skip_new_user_tips
FROM w
JOIN users u on u.id = w.user_id
JOIN user_options uo ON uo.user_id = w.user_id
SQL
builder = DB.build(sql)
builder.where("ub.badge_id IS NULL AND q.user_id > 0")
# triggered (delta) runs require the query to honor the scoping params
if (post_ids || user_ids) && !badge.query.include?(":backfill")
Rails.logger.warn "Your triggered badge query for #{badge.name} does not include the :backfill param, skipping!"
return
end
if post_ids && !badge.query.include?(":post_ids")
Rails.logger.warn "Your triggered badge query for #{badge.name} does not include the :post_ids param, skipping!"
return
end
if user_ids && !badge.query.include?(":user_ids")
Rails.logger.warn "Your triggered badge query for #{badge.name} does not include the :user_ids param, skipping!"
return
end
builder
.query(
id: badge.id,
multiple_grant: badge.multiple_grant,
backfill: full_backfill,
post_ids: post_ids || [-2],
user_ids: user_ids || [-2],
)
.each do |row|
# suppress noisy notifications (old bronze grants, beginner badges,
# staff earning trust-level badges)
next if suppress_notification?(badge, row.granted_at, row.skip_new_user_tips)
next if row.staff && badge.awarded_for_trust_level?
notification = send_notification(row.user_id, row.username, row.locale, badge)
UserBadge.trigger_user_badge_granted_event(badge.id, row.user_id)
DB.exec(
"UPDATE user_badges SET notification_id = :notification_id WHERE id = :id",
notification_id: notification.id,
id: row.id,
)
end
badge.reset_grant_count!
rescue => e
raise GrantError, "Failed to backfill '#{badge.name}' badge: #{opts}. Reason: #{e.message}"
end
# Clear titles (and then the granted_title_badge_id marker) for users whose
# title-granting badge was revoked, disabled, or had allow_title removed.
def self.revoke_ungranted_titles!
DB.exec <<~SQL
UPDATE users u
SET title = ''
FROM user_profiles up
WHERE u.title IS NOT NULL
AND u.title <> ''
AND up.user_id = u.id
AND up.granted_title_badge_id IS NOT NULL
AND NOT EXISTS(
SELECT 1
FROM badges b
JOIN user_badges ub ON ub.user_id = u.id AND ub.badge_id = b.id
WHERE b.id = up.granted_title_badge_id
AND b.allow_title
AND b.enabled
)
SQL
DB.exec <<~SQL
UPDATE user_profiles up
SET granted_title_badge_id = NULL
FROM users u
WHERE up.user_id = u.id
AND (u.title IS NULL OR u.title = '')
AND up.granted_title_badge_id IS NOT NULL
SQL
end
# Resolve the locale for a badge notification, falling back to the site
# default when user locales are disabled or the user's locale is blank.
def self.notification_locale(locale)
use_default_locale = !SiteSetting.allow_user_locale || locale.blank?
use_default_locale ? SiteSetting.default_locale : locale
end
# Create the granted_badge notification for +user_id+, localized per the
# resolved locale. Returns the Notification.
def self.send_notification(user_id, username, locale, badge)
I18n.with_locale(notification_locale(locale)) do
Notification.create!(
user_id: user_id,
notification_type: Notification.types[:granted_badge],
data: {
badge_id: badge.id,
badge_name: badge.display_name,
badge_slug: badge.slug,
badge_title: badge.allow_title,
username: username,
}.to_json,
)
end
end
# True when no notification should be sent: backdated (older than 2 days)
# Bronze grants, and beginner badges for users who opted out of new-user tips.
def self.suppress_notification?(badge, granted_at, skip_new_user_tips)
return true if badge.badge_type_id == BadgeType::Bronze && granted_at < 2.days.ago
skip_new_user_tips && badge.for_beginners?
end
end
``` | # frozen_string_literal: true
RSpec.describe BadgeGranter do
fab!(:badge)
fab!(:user)
# run with the badge queue enabled so queue-related paths are exercised,
# and clear any queued events between examples
before { BadgeGranter.enable_queue }
after do
BadgeGranter.disable_queue
BadgeGranter.clear_queue!
end
describe "revoke_titles" do
let(:user) { Fabricate(:user) }
let(:badge) { Fabricate(:badge, allow_title: true) }
it "revokes title when badge is not allowed as title" do
BadgeGranter.grant(badge, user)
user.update!(title: badge.name)
BadgeGranter.revoke_ungranted_titles!
user.reload
expect(user.title).to eq(badge.name)
expect(user.user_profile.granted_title_badge_id).to eq(badge.id)
badge.update_column(:allow_title, false)
BadgeGranter.revoke_ungranted_titles!
user.reload
expect(user.title).to be_blank
expect(user.user_profile.granted_title_badge_id).to be_nil
end
it "revokes title when badge is disabled" do
BadgeGranter.grant(badge, user)
user.update!(title: badge.name)
BadgeGranter.revoke_ungranted_titles!
user.reload
expect(user.title).to eq(badge.name)
expect(user.user_profile.granted_title_badge_id).to eq(badge.id)
badge.update_column(:enabled, false)
BadgeGranter.revoke_ungranted_titles!
user.reload
expect(user.title).to be_blank
expect(user.user_profile.granted_title_badge_id).to be_nil
end
it "revokes title when user badge is revoked" do
BadgeGranter.grant(badge, user)
user.update!(title: badge.name)
BadgeGranter.revoke_ungranted_titles!
user.reload
expect(user.title).to eq(badge.name)
expect(user.user_profile.granted_title_badge_id).to eq(badge.id)
BadgeGranter.revoke(user.user_badges.first)
BadgeGranter.revoke_ungranted_titles!
user.reload
expect(user.title).to be_blank
expect(user.user_profile.granted_title_badge_id).to be_nil
end
it "does not revoke custom title" do
user.title = "CEO"
user.save!
BadgeGranter.revoke_ungranted_titles!
user.reload
expect(user.title).to eq("CEO")
end
it "does not revoke localized title" do
badge = Badge.find(Badge::Regular)
badge_name = nil
BadgeGranter.grant(badge, user)
I18n.with_locale(:de) do
badge_name = badge.display_name
user.update!(title: badge_name)
end
user.reload
expect(user.title).to eq(badge_name)
expect(user.user_profile.granted_title_badge_id).to eq(badge.id)
BadgeGranter.revoke_ungranted_titles!
user.reload
expect(user.title).to eq(badge_name)
expect(user.user_profile.granted_title_badge_id).to eq(badge.id)
end
end
describe "preview" do
it "can correctly preview" do
Fabricate(:user, email: "[email protected]")
result =
BadgeGranter.preview(
'select u.id user_id, null post_id, u.created_at granted_at from users u
join user_emails ue on ue.user_id = u.id AND ue.primary
where ue.email like \'%gmail.com\'',
explain: true,
)
expect(result[:grant_count]).to eq(1)
expect(result[:query_plan]).to be_present
end
it "with badges containing trailing comments do not break generated SQL" do
query = Badge.find(1).query + "\n-- a comment"
expect(BadgeGranter.preview(query)[:errors]).to be_nil
end
end
describe "backfill" do
it "has no broken badge queries" do
Badge.all.each { |b| BadgeGranter.backfill(b) }
end
it "can backfill the welcome badge" do
post = Fabricate(:post)
user2 = Fabricate(:user)
PostActionCreator.like(user2, post)
UserBadge.destroy_all
BadgeGranter.backfill(Badge.find(Badge::Welcome))
BadgeGranter.backfill(Badge.find(Badge::FirstLike))
b = UserBadge.find_by(user_id: post.user_id)
expect(b.post_id).to eq(post.id)
b.badge_id = Badge::Welcome
b = UserBadge.find_by(user_id: user2.id)
expect(b.post_id).to eq(post.id)
b.badge_id = Badge::FirstLike
end
it "should grant missing badges" do
nice_topic = Badge.find(Badge::NiceTopic)
good_topic = Badge.find(Badge::GoodTopic)
post = Fabricate(:post, like_count: 30)
2.times do
BadgeGranter.backfill(nice_topic, post_ids: [post.id])
BadgeGranter.backfill(good_topic)
end
# TODO add welcome
expect(post.user.user_badges.pluck(:badge_id)).to contain_exactly(
nice_topic.id,
good_topic.id,
)
expect(post.user.notifications.count).to eq(2)
data = post.user.notifications.last.data_hash
expect(data["badge_id"]).to eq(good_topic.id)
expect(data["badge_slug"]).to eq(good_topic.slug)
expect(data["username"]).to eq(post.user.username)
expect(nice_topic.grant_count).to eq(1)
expect(good_topic.grant_count).to eq(1)
end
it "should grant badges in the user locale" do
SiteSetting.allow_user_locale = true
nice_topic = Badge.find(Badge::NiceTopic)
name_english = nice_topic.name
user = Fabricate(:user, locale: "fr")
post = Fabricate(:post, like_count: 10, user: user)
BadgeGranter.backfill(nice_topic)
notification_badge_name = JSON.parse(post.user.notifications.first.data)["badge_name"]
expect(notification_badge_name).not_to eq(name_english)
end
it "with badges containing trailing comments do not break generated SQL" do
badge = Fabricate(:badge)
badge.query = Badge.find(1).query + "\n-- a comment"
expect { BadgeGranter.backfill(badge) }.not_to raise_error
end
it 'does not notify about badges "for beginners" when user skipped new user tips' do
user.user_option.update!(skip_new_user_tips: true)
post = Fabricate(:post)
PostActionCreator.like(user, post)
expect { BadgeGranter.backfill(Badge.find(Badge::FirstLike)) }.to_not change {
Notification.where(user_id: user.id).count
}
end
it "does not grant sharing badges to deleted users" do
post = Fabricate(:post)
incoming_links = Fabricate.times(25, :incoming_link, post: post, user: user)
user_id = user.id
user.destroy!
nice_share = Badge.find(Badge::NiceShare)
first_share = Badge.find(Badge::FirstShare)
BadgeGranter.backfill(nice_share)
BadgeGranter.backfill(first_share)
expect(UserBadge.where(user_id: user_id).count).to eq(0)
end
end
describe "grant" do
it "allows overriding of granted_at does not notify old bronze" do
freeze_time
badge = Badge.create!(name: "a badge", badge_type_id: BadgeType::Bronze)
user_badge = BadgeGranter.grant(badge, user, created_at: 1.year.ago)
expect(user_badge.granted_at).to eq_time(1.year.ago)
expect(Notification.where(user_id: user.id).count).to eq(0)
end
it "handles deleted badge" do
freeze_time
user_badge = BadgeGranter.grant(nil, user, created_at: 1.year.ago)
expect(user_badge).to eq(nil)
end
it "doesn't grant disabled badges" do
freeze_time
badge = Fabricate(:badge, badge_type_id: BadgeType::Bronze, enabled: false)
user_badge = BadgeGranter.grant(badge, user, created_at: 1.year.ago)
expect(user_badge).to eq(nil)
end
it "doesn't notify about badges 'for beginners' when user skipped new user tips" do
freeze_time
UserBadge.destroy_all
user.user_option.update!(skip_new_user_tips: true)
badge = Fabricate(:badge, badge_grouping_id: BadgeGrouping::GettingStarted)
expect { BadgeGranter.grant(badge, user) }.to_not change {
Notification.where(user_id: user.id).count
}
end
it "notifies about the New User of the Month badge when user skipped new user tips" do
freeze_time
user.user_option.update!(skip_new_user_tips: true)
badge = Badge.find(Badge::NewUserOfTheMonth)
expect { BadgeGranter.grant(badge, user) }.to change {
Notification.where(user_id: user.id).count
}
end
it "grants multiple badges" do
badge = Fabricate(:badge, multiple_grant: true)
user_badge = BadgeGranter.grant(badge, user)
user_badge = BadgeGranter.grant(badge, user)
expect(user_badge).to be_present
expect(UserBadge.where(user_id: user.id).count).to eq(2)
end
it "sets granted_at" do
day_ago = freeze_time 1.day.ago
user_badge = BadgeGranter.grant(badge, user)
expect(user_badge.granted_at).to eq_time(day_ago)
end
it "sets granted_by if the option is present" do
admin = Fabricate(:admin)
StaffActionLogger.any_instance.expects(:log_badge_grant).once
user_badge = BadgeGranter.grant(badge, user, granted_by: admin)
expect(user_badge.granted_by).to eq(admin)
end
it "defaults granted_by to the system user" do
StaffActionLogger.any_instance.expects(:log_badge_grant).never
user_badge = BadgeGranter.grant(badge, user)
expect(user_badge.granted_by_id).to eq(Discourse.system_user.id)
end
it "does not allow a regular user to grant badges" do
user_badge = BadgeGranter.grant(badge, user, granted_by: Fabricate(:user))
expect(user_badge).not_to be_present
end
it "increments grant_count on the badge and creates a notification" do
BadgeGranter.grant(badge, user)
expect(badge.reload.grant_count).to eq(1)
expect(
user.notifications.find_by(notification_type: Notification.types[:granted_badge]).data_hash[
"badge_id"
],
).to eq(badge.id)
end
it "does not fail when user is missing" do
BadgeGranter.grant(badge, nil)
expect(badge.reload.grant_count).to eq(0)
end
end
describe "revoke" do
fab!(:admin)
let!(:user_badge) { BadgeGranter.grant(badge, user) }
it "revokes the badge and does necessary cleanup" do
user.title = badge.name
user.save!
expect(badge.reload.grant_count).to eq(1)
StaffActionLogger.any_instance.expects(:log_badge_revoke).with(user_badge)
BadgeGranter.revoke(user_badge, revoked_by: admin)
expect(UserBadge.find_by(user: user, badge: badge)).not_to be_present
expect(badge.reload.grant_count).to eq(0)
expect(
user.notifications.where(notification_type: Notification.types[:granted_badge]),
).to be_empty
expect(user.reload.title).to eq(nil)
end
context "when the badge name is customized, and the customized name is the same as the user title" do
let(:customized_badge_name) { "Merit Badge" }
before do
TranslationOverride.upsert!(I18n.locale, Badge.i18n_key(badge.name), customized_badge_name)
end
it "revokes the badge and title and does necessary cleanup" do
user.title = customized_badge_name
user.save!
expect(badge.reload.grant_count).to eq(1)
StaffActionLogger.any_instance.expects(:log_badge_revoke).with(user_badge)
StaffActionLogger
.any_instance
.expects(:log_title_revoke)
.with(
user,
revoke_reason: "user title was same as revoked badge name or custom badge name",
previous_value: user_badge.user.title,
)
BadgeGranter.revoke(user_badge, revoked_by: admin)
expect(UserBadge.find_by(user: user, badge: badge)).not_to be_present
expect(badge.reload.grant_count).to eq(0)
expect(
user.notifications.where(notification_type: Notification.types[:granted_badge]),
).to be_empty
expect(user.reload.title).to eq(nil)
end
after { TranslationOverride.revert!(I18n.locale, Badge.i18n_key(badge.name)) }
end
end
describe "revoke_all" do
it "deletes every user_badge record associated with that badge" do
described_class.grant(badge, user)
described_class.revoke_all(badge)
expect(UserBadge.exists?(badge: badge, user: user)).to eq(false)
end
it "removes titles" do
another_title = "another title"
described_class.grant(badge, user)
user.update!(title: badge.name)
user2 = Fabricate(:user, title: another_title)
described_class.revoke_all(badge)
expect(user.reload.title).to be_nil
expect(user2.reload.title).to eq(another_title)
end
it "removes custom badge titles" do
custom_badge_title = "this is a badge title"
TranslationOverride.create!(
translation_key: badge.translation_key,
value: custom_badge_title,
locale: "en",
)
described_class.grant(badge, user)
user.update!(title: custom_badge_title)
described_class.revoke_all(badge)
expect(user.reload.title).to be_nil
end
end
describe "update_badges" do
fab!(:user)
fab!(:liker) { Fabricate(:user) }
it "grants autobiographer" do
user.user_profile.bio_raw = "THIS IS MY bio it a long bio I like my bio"
user.uploaded_avatar_id = 10
user.user_profile.save
user.save
BadgeGranter.process_queue!
expect(UserBadge.where(user_id: user.id, badge_id: Badge::Autobiographer).count).to eq(1)
end
it "grants read guidelines" do
user.user_stat.read_faq = Time.now
user.user_stat.save
BadgeGranter.process_queue!
expect(UserBadge.where(user_id: user.id, badge_id: Badge::ReadGuidelines).count).to eq(1)
end
it "grants first link" do
post = create_post
post2 = create_post(raw: "#{Discourse.base_url}/t/slug/#{post.topic_id}")
BadgeGranter.process_queue!
expect(UserBadge.where(user_id: post2.user.id, badge_id: Badge::FirstLink).count).to eq(1)
end
it "grants first edit" do
SiteSetting.editing_grace_period = 0
post = create_post
user = post.user
expect(UserBadge.where(user_id: user.id, badge_id: Badge::Editor).count).to eq(0)
PostRevisor.new(post).revise!(user, raw: "This is my new test 1235 123")
BadgeGranter.process_queue!
expect(UserBadge.where(user_id: user.id, badge_id: Badge::Editor).count).to eq(1)
end
it "grants and revokes trust level badges" do
user.change_trust_level!(TrustLevel[4])
BadgeGranter.process_queue!
expect(UserBadge.where(user_id: user.id, badge_id: Badge.trust_level_badge_ids).count).to eq(
4,
)
user.change_trust_level!(TrustLevel[1])
BadgeGranter.backfill(Badge.find(1))
BadgeGranter.backfill(Badge.find(2))
expect(UserBadge.where(user_id: user.id, badge_id: 1).first).not_to eq(nil)
expect(UserBadge.where(user_id: user.id, badge_id: 2).first).to eq(nil)
end
it "grants system like badges" do
post = create_post(user: user)
# Welcome badge
action = PostActionCreator.like(liker, post).post_action
BadgeGranter.process_queue!
expect(UserBadge.find_by(user_id: user.id, badge_id: 5)).not_to eq(nil)
post = create_post(topic: post.topic, user: user)
action = PostActionCreator.like(liker, post).post_action
# Nice post badge
post.update like_count: 10
BadgeGranter.queue_badge_grant(Badge::Trigger::PostAction, post_action: action)
BadgeGranter.process_queue!
expect(UserBadge.find_by(user_id: user.id, badge_id: Badge::NicePost)).not_to eq(nil)
expect(UserBadge.where(user_id: user.id, badge_id: Badge::NicePost).count).to eq(1)
# Good post badge
post.update like_count: 25
BadgeGranter.queue_badge_grant(Badge::Trigger::PostAction, post_action: action)
BadgeGranter.process_queue!
expect(UserBadge.find_by(user_id: user.id, badge_id: Badge::GoodPost)).not_to eq(nil)
# Great post badge
post.update like_count: 50
BadgeGranter.queue_badge_grant(Badge::Trigger::PostAction, post_action: action)
BadgeGranter.process_queue!
expect(UserBadge.find_by(user_id: user.id, badge_id: Badge::GreatPost)).not_to eq(nil)
# Revoke badges on unlike
post.update like_count: 49
BadgeGranter.backfill(Badge.find(Badge::GreatPost))
expect(UserBadge.find_by(user_id: user.id, badge_id: Badge::GreatPost)).to eq(nil)
end
it "triggers the 'user_badge_granted' DiscourseEvent per badge when badges are backfilled" do
post = create_post(user: user)
action = PostActionCreator.like(liker, post).post_action
events = DiscourseEvent.track_events(:user_badge_granted) { BadgeGranter.process_queue! }
expect(events.length).to eq(2)
expect(events[0][:params]).to eq([Badge::FirstLike, liker.id])
expect(events[1][:params]).to eq([Badge::Welcome, user.id])
end
end
describe "notification locales" do
it "is using default locales when user locales are not set" do
SiteSetting.allow_user_locale = true
expect(BadgeGranter.notification_locale("")).to eq(SiteSetting.default_locale)
end
it "is using default locales when user locales are set but is not allowed" do
SiteSetting.allow_user_locale = false
expect(BadgeGranter.notification_locale("pl_PL")).to eq(SiteSetting.default_locale)
end
it "is using user locales when set and allowed" do
SiteSetting.allow_user_locale = true
expect(BadgeGranter.notification_locale("pl_PL")).to eq("pl_PL")
end
end
describe ".mass_grant" do
it "raises an error if the count argument is less than 1" do
expect do BadgeGranter.mass_grant(badge, user, count: 0) end.to raise_error(
ArgumentError,
"count can't be less than 1",
)
end
it "grants the badge to the user as many times as the count argument" do
BadgeGranter.mass_grant(badge, user, count: 10)
sequence = UserBadge.where(badge: badge, user: user).pluck(:seq).sort
expect(sequence).to eq((0...10).to_a)
BadgeGranter.mass_grant(badge, user, count: 10)
sequence = UserBadge.where(badge: badge, user: user).pluck(:seq).sort
expect(sequence).to eq((0...20).to_a)
end
end
describe ".enqueue_mass_grant_for_users" do
before { Jobs.run_immediately! }
it "returns a list of the entries that could not be matched to any users" do
results =
BadgeGranter.enqueue_mass_grant_for_users(
badge,
emails: ["[email protected]", user.email],
usernames: [user.username, "fakeusername"],
)
expect(results[:unmatched_entries]).to contain_exactly(
"[email protected]",
"fakeusername",
)
expect(results[:matched_users_count]).to eq(1)
expect(results[:unmatched_entries_count]).to eq(2)
end
context "when ensure_users_have_badge_once is true" do
it "ensures each user has the badge at least once and does not grant the badge multiple times to one user" do
BadgeGranter.grant(badge, user)
user_without_badge = Fabricate(:user)
Notification.destroy_all
results =
BadgeGranter.enqueue_mass_grant_for_users(
badge,
usernames: [
user.username,
user.username,
user_without_badge.username,
user_without_badge.username,
],
ensure_users_have_badge_once: true,
)
expect(results[:unmatched_entries]).to eq([])
expect(results[:matched_users_count]).to eq(2)
expect(results[:unmatched_entries_count]).to eq(0)
sequence = UserBadge.where(user: user, badge: badge).pluck(:seq)
expect(sequence).to contain_exactly(0)
# no new badge/notification because user already had the badge
# before enqueue_mass_grant_for_users was called
expect(user.reload.notifications.size).to eq(0)
sequence = UserBadge.where(user: user_without_badge, badge: badge)
expect(sequence.pluck(:seq)).to contain_exactly(0)
notifications = user_without_badge.reload.notifications
expect(notifications.size).to eq(1)
expect(sequence.first.notification_id).to eq(notifications.first.id)
expect(notifications.first.notification_type).to eq(Notification.types[:granted_badge])
end
end
context "when ensure_users_have_badge_once is false" do
it "grants the badge to the users as many times as they appear in the emails and usernames arguments" do
badge.update!(multiple_grant: true)
user_without_badge = Fabricate(:user)
user_with_badge = Fabricate(:user).tap { |u| BadgeGranter.grant(badge, u) }
Notification.destroy_all
emails = [user_with_badge.email.titlecase, user_without_badge.email.titlecase] * 20
usernames = [user_with_badge.username.titlecase, user_without_badge.username.titlecase] * 20
results =
BadgeGranter.enqueue_mass_grant_for_users(
badge,
emails: emails,
usernames: usernames,
ensure_users_have_badge_once: false,
)
expect(results[:unmatched_entries]).to eq([])
expect(results[:matched_users_count]).to eq(2)
expect(results[:unmatched_entries_count]).to eq(0)
sequence = UserBadge.where(user: user_with_badge, badge: badge).pluck(:seq)
expect(sequence.size).to eq(40 + 1)
expect(sequence.sort).to eq((0...(40 + 1)).to_a)
sequence = UserBadge.where(user: user_without_badge, badge: badge).pluck(:seq)
expect(sequence.size).to eq(40)
expect(sequence.sort).to eq((0...40).to_a)
# each user gets 1 notification no matter how many times
# they're repeated in the file.
[user_without_badge, user_with_badge].each do |u|
notifications = u.reload.notifications
expect(notifications.size).to eq(1)
expect(notifications.map(&:notification_type).uniq).to contain_exactly(
Notification.types[:granted_badge],
)
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class.
```ruby
# frozen_string_literal: true
# Generates (and caches) an AI summary of a topic using an injected
# summarization strategy.
#
# Caching model: one SummarySection row with meta_section_id: nil is the
# "main" summary for the topic; additional rows pointing at it via
# meta_section_id hold intermediate per-chunk summaries.
class TopicSummarization
  # @param strategy [#summarize, #model] backend that produces the summary text
  def initialize(strategy)
    @strategy = strategy
  end

  # Returns a SummarySection for the topic, reusing the cached one when
  # possible; otherwise regenerates, re-caches, and returns the new one.
  # Partial results are streamed to +on_partial_blk+ during generation.
  #
  # @param topic [Topic]
  # @param user [User, nil] anonymous users can only read an existing cached summary
  # @param opts [Hash] :skip_age_check bypasses the 1-hour freshness window
  def summarize(topic, user, opts = {}, &on_partial_blk)
    existing_summary = SummarySection.find_by(target: topic, meta_section_id: nil)

    # Existing summary shouldn't be nil in this scenario because the controller checks its existence.
    return if !user && !existing_summary

    targets_data = summary_targets(topic).pluck(:post_number, :raw, :username)

    # Fingerprint of the current target post numbers; used to detect staleness.
    current_topic_sha = build_sha(targets_data.map(&:first))
    can_summarize = Summarization::Base.can_request_summary_for?(user)

    if use_cached?(existing_summary, can_summarize, current_topic_sha, !!opts[:skip_age_check])
      # It's important that we signal a cached summary is outdated
      existing_summary.mark_as_outdated if new_targets?(existing_summary, current_topic_sha)
      return existing_summary
    end

    delete_cached_summaries_of(topic) if existing_summary

    content = {
      resource_path: "#{Discourse.base_path}/t/-/#{topic.id}",
      content_title: topic.title,
      contents: [],
    }

    # NOTE(review): map is used purely for its side effect here; each would
    # express the intent better.
    targets_data.map do |(pn, raw, username)|
      content[:contents] << { poster: username, id: pn, text: raw }
    end

    summarization_result = strategy.summarize(content, user, &on_partial_blk)

    cache_summary(summarization_result, targets_data.map(&:first), topic)
  end

  # Posts the summary is built from: staff-picked "summary" posts when the
  # topic has them, otherwise a heuristic selection.
  def summary_targets(topic)
    topic.has_summary? ? best_replies(topic) : pick_selection(topic)
  end

  private

  attr_reader :strategy

  # Staff-summarized, visible, regular posts (with an existing user), in order.
  def best_replies(topic)
    Post
      .summary(topic.id)
      .where("post_type = ?", Post.types[:regular])
      .where("NOT hidden")
      .joins(:user)
      .order(:post_number)
  end

  # Heuristic sample: first 5 posts + top 50 by score + last 5 posts
  # (duplicates collapse via the post_number IN (...) requery below).
  def pick_selection(topic)
    posts =
      Post
        .where(topic_id: topic.id)
        .where("post_type = ?", Post.types[:regular])
        .where("NOT hidden")
        .order(:post_number)

    post_numbers = posts.limit(5).pluck(:post_number)
    post_numbers += posts.reorder("posts.score desc").limit(50).pluck(:post_number)
    post_numbers += posts.reorder("post_number desc").limit(5).pluck(:post_number)

    Post
      .where(topic_id: topic.id)
      .joins(:user)
      .where("post_number in (?)", post_numbers)
      .order(:post_number)
  end

  # Removes the main summary and all of its chunk rows for the topic.
  def delete_cached_summaries_of(topic)
    SummarySection.where(target: topic).destroy_all
  end

  # For users without permissions to generate a summary or fresh summaries, we return what we have cached.
  def use_cached?(existing_summary, can_summarize, current_sha, skip_age_check)
    existing_summary &&
      !(
        can_summarize && new_targets?(existing_summary, current_sha) &&
          (skip_age_check || existing_summary.created_at < 1.hour.ago)
      )
  end

  # True when the target post set changed since the summary was cached.
  def new_targets?(summary, current_sha)
    summary.original_content_sha != current_sha
  end

  # Persists the main summary plus one row per intermediate chunk.
  # @return [SummarySection] the main summary row
  def cache_summary(result, post_numbers, topic)
    main_summary =
      SummarySection.create!(
        target: topic,
        algorithm: strategy.model,
        content_range: (post_numbers.first..post_numbers.last),
        summarized_text: result[:summary],
        original_content_sha: build_sha(post_numbers),
      )

    result[:chunks].each do |chunk|
      SummarySection.create!(
        target: topic,
        algorithm: strategy.model,
        content_range: chunk[:ids].min..chunk[:ids].max,
        summarized_text: chunk[:summary],
        original_content_sha: build_sha(chunk[:ids]),
        meta_section_id: main_summary.id,
      )
    end

    main_summary
  end

  # Deterministic fingerprint of a list of post numbers.
  def build_sha(ids)
    Digest::SHA256.hexdigest(ids.join)
  end
end
``` | # frozen_string_literal: true
describe TopicSummarization do
fab!(:user) { Fabricate(:admin) }
fab!(:topic) { Fabricate(:topic, highest_post_number: 2) }
fab!(:post_1) { Fabricate(:post, topic: topic, post_number: 1) }
fab!(:post_2) { Fabricate(:post, topic: topic, post_number: 2) }
shared_examples "includes only public-visible topics" do
subject { described_class.new(DummyCustomSummarization.new({})) }
it "only includes visible posts" do
topic.first_post.update!(hidden: true)
posts = subject.summary_targets(topic)
expect(posts.none?(&:hidden?)).to eq(true)
end
it "doesn't include posts without users" do
topic.first_post.user.destroy!
posts = subject.summary_targets(topic)
expect(posts.detect { |p| p.id == topic.first_post.id }).to be_nil
end
it "doesn't include deleted posts" do
topic.first_post.update!(user_id: nil)
posts = subject.summary_targets(topic)
expect(posts.detect { |p| p.id == topic.first_post.id }).to be_nil
end
end
describe "#summary_targets" do
context "when the topic has a best replies summary" do
before { topic.has_summary = true }
it_behaves_like "includes only public-visible topics"
end
context "when the topic doesn't have a best replies summary" do
before { topic.has_summary = false }
it_behaves_like "includes only public-visible topics"
end
end
describe "#summarize" do
subject(:summarization) { described_class.new(strategy) }

let(:strategy) { DummyCustomSummarization.new(summary) }

# Asserts the top-level summary row (meta_section_id: nil) was persisted
# with the expected range, text, sha, and algorithm.
def assert_summary_is_cached(topic, summary_response)
  cached_summary = SummarySection.find_by(target: topic, meta_section_id: nil)
  expect(cached_summary.content_range).to cover(*topic.posts.map(&:post_number))
  expect(cached_summary.summarized_text).to eq(summary_response[:summary])
  expect(cached_summary.original_content_sha).to be_present
  expect(cached_summary.algorithm).to eq(strategy.model)
end

# Asserts an intermediate chunk row (linked via meta_section_id) was
# persisted for the given chunk response.
def assert_chunk_is_cached(topic, chunk_response)
  cached_chunk =
    SummarySection
      .where.not(meta_section_id: nil)
      .find_by(
        target: topic,
        content_range: (chunk_response[:ids].min..chunk_response[:ids].max),
      )
  expect(cached_chunk.summarized_text).to eq(chunk_response[:summary])
  expect(cached_chunk.original_content_sha).to be_present
  expect(cached_chunk.algorithm).to eq(strategy.model)
end
context "when the content was summarized in a single chunk" do
let(:summary) { { summary: "This is the final summary", chunks: [] } }
it "caches the summary" do
section = summarization.summarize(topic, user)
expect(section.summarized_text).to eq(summary[:summary])
assert_summary_is_cached(topic, summary)
end
it "returns the cached version in subsequent calls" do
summarization.summarize(topic, user)
cached_summary_text = "This is a cached summary"
cached_summary =
SummarySection.find_by(target: topic, meta_section_id: nil).update!(
summarized_text: cached_summary_text,
updated_at: 24.hours.ago,
)
section = summarization.summarize(topic, user)
expect(section.summarized_text).to eq(cached_summary_text)
end
end
context "when the content was summarized in multiple chunks" do
let(:summary) do
{
summary: "This is the final summary",
chunks: [
{ ids: [topic.first_post.post_number], summary: "this is the first chunk" },
{ ids: [post_1.post_number, post_2.post_number], summary: "this is the second chunk" },
],
}
end
it "caches the summary and each chunk" do
section = summarization.summarize(topic, user)
expect(section.summarized_text).to eq(summary[:summary])
assert_summary_is_cached(topic, summary)
summary[:chunks].each { |c| assert_chunk_is_cached(topic, c) }
end
end
describe "invalidating cached summaries" do
let(:cached_text) { "This is a cached summary" }
let(:summarized_text) { "This is the final summary" }
let(:summary) do
{
summary: summarized_text,
chunks: [
{ ids: [topic.first_post.post_number], summary: "this is the first chunk" },
{ ids: [post_1.post_number, post_2.post_number], summary: "this is the second chunk" },
],
}
end
def cached_summary
SummarySection.find_by(target: topic, meta_section_id: nil)
end
before do
summarization.summarize(topic, user)
cached_summary.update!(summarized_text: cached_text, created_at: 24.hours.ago)
end
context "when the user can requests new summaries" do
context "when there are no new posts" do
it "returns the cached summary" do
section = summarization.summarize(topic, user)
expect(section.summarized_text).to eq(cached_text)
end
end
context "when there are new posts" do
before { cached_summary.update!(original_content_sha: "outdated_sha") }
it "returns a new summary" do
section = summarization.summarize(topic, user)
expect(section.summarized_text).to eq(summarized_text)
end
context "when the cached summary is less than one hour old" do
before { cached_summary.update!(created_at: 30.minutes.ago) }
it "returns the cached summary" do
cached_summary.update!(created_at: 30.minutes.ago)
section = summarization.summarize(topic, user)
expect(section.summarized_text).to eq(cached_text)
expect(section.outdated).to eq(true)
end
it "returns a new summary if the skip_age_check flag is passed" do
section = summarization.summarize(topic, user, skip_age_check: true)
expect(section.summarized_text).to eq(summarized_text)
end
end
end
end
end
describe "stream partial updates" do
let(:summary) { { summary: "This is the final summary", chunks: [] } }
it "receives a blk that is passed to the underlying strategy and called with partial summaries" do
partial_result = nil
summarization.summarize(topic, user) { |partial_summary| partial_result = partial_summary }
expect(partial_result).to eq(summary[:summary])
end
end
end
end
|
Write an RSpec test file for the following Ruby class.
```ruby
# frozen_string_literal: true
# Responsible for logging the actions of admins and moderators.
class StaffActionLogger
# Attributes callers may pass straight through to UserHistory rows.
def self.base_attrs
  %i[topic_id post_id context subject ip_address previous_value new_value]
end

# @param admin [User] acting staff member; every log entry is attributed to them.
def initialize(admin)
  @admin = admin
  raise Discourse::InvalidParameters.new(:admin) unless @admin && @admin.is_a?(User)
end

# User fields snapshotted into the details blob when a user is deleted.
USER_FIELDS ||= %i[id username name created_at trust_level last_seen_at last_emailed_at]

# Records a user deletion, snapshotting key fields for the audit trail
# (the User row itself will be gone).
def log_user_deletion(deleted_user, opts = {})
  unless deleted_user && deleted_user.is_a?(User)
    raise Discourse::InvalidParameters.new(:deleted_user)
  end
  details = USER_FIELDS.map { |x| "#{x}: #{deleted_user.public_send(x)}" }.join("\n")
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:delete_user],
      ip_address: deleted_user.ip_address.to_s,
      details: details,
    ),
  )
end
# Logs an arbitrary staff action under a custom type.
#
# Known UserHistory columns (see base_attrs) are pulled out of +details+
# into their own attributes; everything left over is serialized into the
# free-form details string.
#
# @param custom_type [String] identifier for the custom action
# @param details [Hash, nil] extra data; base_attrs keys become columns
def log_custom(custom_type, details = nil)
  raise Discourse::InvalidParameters.new(:custom_type) unless custom_type

  # Work on a copy: the original deleted keys from the caller's hash,
  # silently mutating their argument.
  details = details ? details.dup : {}

  attrs = {}
  StaffActionLogger.base_attrs.each do |attr|
    attrs[attr] = details.delete(attr) if details.has_key?(attr)
  end
  attrs[:details] = details.map { |r| "#{r[0]}: #{r[1]}" }.join("\n")
  attrs[:acting_user_id] = @admin.id
  attrs[:action] = UserHistory.actions[:custom_staff]
  attrs[:custom_type] = custom_type
  UserHistory.create!(attrs)
end
# Computes a [previous, new] pair of JSON snapshots describing how a
# directory column's attributes change under +column_data+.
#
# Unchanged attributes (other than "name", which is always kept) are dropped
# from both snapshots so the log only records the delta.
#
# @param column_data [Hash] requested attribute values (string keys)
# @param directory_column [DirectoryColumn] the column being edited
# @return [Array(String, String)] [previous_value_json, new_value_json]
def edit_directory_columns_details(column_data, directory_column)
  base = directory_column.attributes.transform_values(&:to_s)

  previous_value = base.dup
  new_value = base.dup

  # Iterate an independent snapshot: the original walked the very hash it
  # was deleting keys from (previous_value WAS the iterated hash).
  base.each do |key, value|
    if column_data[key] != value && column_data[key].present?
      new_value[key] = column_data[key]
    elsif key != "name"
      previous_value.delete key
      new_value.delete key
    end
  end

  [previous_value.to_json, new_value.to_json]
end
# Records a post deletion, snapshotting enough post/topic data to keep the
# entry useful after the records are gone.
def log_post_deletion(deleted_post, opts = {})
  unless deleted_post && deleted_post.is_a?(Post)
    raise Discourse::InvalidParameters.new(:deleted_post)
  end
  topic = deleted_post.topic || Topic.with_deleted.find_by(id: deleted_post.topic_id)
  # The author or topic may already be gone; fall back to placeholder strings.
  username = deleted_post.user.try(:username) || I18n.t("staff_action_logs.unknown")
  name = deleted_post.user.try(:name) || I18n.t("staff_action_logs.unknown")
  topic_title = topic.try(:title) || I18n.t("staff_action_logs.not_found")
  details = [
    "id: #{deleted_post.id}",
    "created_at: #{deleted_post.created_at}",
    "user: #{username} (#{name})",
    "topic: #{topic_title}",
    "post_number: #{deleted_post.post_number}",
    "raw: #{deleted_post.raw}",
  ]
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:delete_post],
      post_id: deleted_post.id,
      details: details.join("\n"),
    ),
  )
end

# Records a topic deletion or recovery (+action+ names the UserHistory
# action key), snapshotting the topic and its first post's raw.
def log_topic_delete_recover(topic, action = "delete_topic", opts = {})
  raise Discourse::InvalidParameters.new(:topic) unless topic && topic.is_a?(Topic)
  user = topic.user ? "#{topic.user.username} (#{topic.user.name})" : "(deleted user)"
  details = [
    "id: #{topic.id}",
    "created_at: #{topic.created_at}",
    "user: #{user}",
    "title: #{topic.title}",
  ]
  if first_post = topic.ordered_posts.with_deleted.first
    details << "raw: #{first_post.raw}"
  end
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[action.to_sym],
      topic_id: topic.id,
      details: details.join("\n"),
    ),
  )
end

# Records a manual trust level change, validating both endpoints.
def log_trust_level_change(user, old_trust_level, new_trust_level, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user && user.is_a?(User)
  unless TrustLevel.valid? old_trust_level
    raise Discourse::InvalidParameters.new(:old_trust_level)
  end
  unless TrustLevel.valid? new_trust_level
    raise Discourse::InvalidParameters.new(:new_trust_level)
  end
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:change_trust_level],
      target_user_id: user.id,
      previous_value: old_trust_level,
      new_value: new_trust_level,
    ),
  )
end

# Logs locking/unlocking a user's trust level, inferring which from the
# user's current manual_locked_trust_level.
def log_lock_trust_level(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user && user.is_a?(User)
  action =
    UserHistory.actions[
      user.manual_locked_trust_level.nil? ? :unlock_trust_level : :lock_trust_level
    ]
  UserHistory.create!(params(opts).merge(action: action, target_user_id: user.id))
end
# Logs that a topic was published.
def log_topic_published(topic, opts = {})
  raise Discourse::InvalidParameters.new(:topic) unless topic && topic.is_a?(Topic)
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:topic_published], topic_id: topic.id),
  )
end

# Logs a change to a topic's timestamps, keeping old and new values.
def log_topic_timestamps_changed(topic, new_timestamp, previous_timestamp, opts = {})
  raise Discourse::InvalidParameters.new(:topic) unless topic && topic.is_a?(Topic)
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:topic_timestamps_changed],
      topic_id: topic.id,
      new_value: new_timestamp,
      previous_value: previous_timestamp,
    ),
  )
end

# Logs locking or unlocking a post; opts[:locked] selects the action.
def log_post_lock(post, opts = {})
  raise Discourse::InvalidParameters.new(:post) unless post && post.is_a?(Post)
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[opts[:locked] ? :post_locked : :post_unlocked],
      post_id: post.id,
    ),
  )
end

# Logs a staff edit of a post; details hold old raw and new raw separated
# by "---" so the change can be reviewed later.
def log_post_edit(post, opts = {})
  raise Discourse::InvalidParameters.new(:post) unless post && post.is_a?(Post)
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:post_edit],
      post_id: post.id,
      details: "#{opts[:old_raw]}\n\n---\n\n#{post.raw}",
    ),
  )
end

# Logs closing/reopening a topic; opts[:closed] selects the action.
def log_topic_closed(topic, opts = {})
  raise Discourse::InvalidParameters.new(:topic) unless topic && topic.is_a?(Topic)
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[opts[:closed] ? :topic_closed : :topic_opened],
      topic_id: topic.id,
    ),
  )
end

# Logs archiving/unarchiving a topic; opts[:archived] selects the action.
def log_topic_archived(topic, opts = {})
  raise Discourse::InvalidParameters.new(:topic) unless topic && topic.is_a?(Topic)
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[opts[:archived] ? :topic_archived : :topic_unarchived],
      topic_id: topic.id,
    ),
  )
end

# Logs creation or removal of a staff note on a post; a blank new_value
# means the note was destroyed.
def log_post_staff_note(post, opts = {})
  raise Discourse::InvalidParameters.new(:post) unless post && post.is_a?(Post)
  args =
    params(opts).merge(
      action:
        UserHistory.actions[
          opts[:new_value].present? ? :post_staff_note_create : :post_staff_note_destroy
        ],
      post_id: post.id,
    )
  args[:new_value] = opts[:new_value] if opts[:new_value].present?
  args[:previous_value] = opts[:old_value] if opts[:old_value].present?
  # NOTE(review): args already contains params(opts); merging it again here
  # is redundant (though harmless).
  UserHistory.create!(params(opts).merge(args))
end

# Logs a site setting change; both values are stringified for storage.
def log_site_setting_change(setting_name, previous_value, new_value, opts = {})
  unless setting_name.present? && SiteSetting.respond_to?(setting_name)
    raise Discourse::InvalidParameters.new(:setting_name)
  end
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:change_site_setting],
      subject: setting_name,
      previous_value: previous_value&.to_s,
      new_value: new_value&.to_s,
    ),
  )
end

# Serializes a theme (including field values) for before/after snapshots.
def theme_json(theme)
  ThemeSerializer.new(theme, root: false, include_theme_field_values: true).to_json
end
# Given two JSON snapshots, drops every key whose value is identical in
# both, keeping the identity fields "name" and "id" regardless. Used to
# shrink theme-change log entries down to the actual delta.
#
# @param old [String, nil] JSON of the previous state
# @param cur [String, nil] JSON of the new state
# @return [Array(String, String)] stripped [old, cur] JSON (inputs returned
#   untouched when either is nil)
def strip_duplicates(old, cur)
  return old, cur unless old && cur

  old = JSON.parse(old)
  cur = JSON.parse(cur)

  # Iterate over a key snapshot so we never delete from a hash while it is
  # being iterated (the original mutated `old` mid-iteration).
  (old.keys - %w[name id]).each do |k|
    if old[k] == cur[k]
      old.delete(k)
      cur.delete(k)
    end
  end

  [old.to_json, cur.to_json]
end
# Logs a theme change; identical attributes are stripped first so the
# entry only records the delta (see strip_duplicates).
def log_theme_change(old_json, new_theme, opts = {})
  raise Discourse::InvalidParameters.new(:new_theme) unless new_theme
  new_json = theme_json(new_theme)
  old_json, new_json = strip_duplicates(old_json, new_json)
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:change_theme],
      subject: new_theme.name,
      previous_value: old_json,
      new_value: new_json,
    ),
  )
end

# Logs theme deletion, snapshotting the full serialized theme.
def log_theme_destroy(theme, opts = {})
  raise Discourse::InvalidParameters.new(:theme) unless theme
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:delete_theme],
      subject: theme.name,
      previous_value: theme_json(theme),
    ),
  )
end

# Logs disabling a theme component.
def log_theme_component_disabled(component)
  UserHistory.create!(
    params.merge(
      action: UserHistory.actions[:disable_theme_component],
      subject: component.name,
      context: component.id,
    ),
  )
end

# Logs enabling a theme component.
def log_theme_component_enabled(component)
  UserHistory.create!(
    params.merge(
      action: UserHistory.actions[:enable_theme_component],
      subject: component.name,
      context: component.id,
    ),
  )
end

# Logs a change to a theme setting; the setting must exist on the theme.
def log_theme_setting_change(setting_name, previous_value, new_value, theme, opts = {})
  raise Discourse::InvalidParameters.new(:theme) unless theme
  unless theme.cached_settings.has_key?(setting_name)
    raise Discourse::InvalidParameters.new(:setting_name)
  end
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:change_theme_setting],
      subject: "#{theme.name}: #{setting_name.to_s}",
      previous_value: previous_value,
      new_value: new_value,
    ),
  )
end

# Logs a site text (translation override) change.
def log_site_text_change(subject, new_text = nil, old_text = nil, opts = {})
  raise Discourse::InvalidParameters.new(:subject) unless subject.present?
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:change_site_text],
      subject: subject,
      previous_value: old_text,
      new_value: new_text,
    ),
  )
end

# Logs a username change for the target user.
def log_username_change(user, old_username, new_username, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:change_username],
      target_user_id: user.id,
      previous_value: old_username,
      new_value: new_username,
    ),
  )
end

# Logs a display-name change; takes an id rather than a User record.
def log_name_change(user_id, old_name, new_name, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user_id
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:change_name],
      target_user_id: user_id,
      previous_value: old_name,
      new_value: new_name,
    ),
  )
end

# Logs a suspension; reason + optional message are formatted into details,
# and the triggering post (if any) is linked.
def log_user_suspend(user, reason, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  details = StaffMessageFormat.new(:suspend, reason, opts[:message]).format
  args =
    params(opts).merge(
      action: UserHistory.actions[:suspend_user],
      target_user_id: user.id,
      details: details,
    )
  args[:post_id] = opts[:post_id] if opts[:post_id]
  UserHistory.create!(args)
end

# Logs the lifting of a suspension.
def log_user_unsuspend(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:unsuspend_user], target_user_id: user.id),
  )
end

# Logs merging a source account into +user+, recording the source identity.
def log_user_merge(user, source_username, source_email, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:merge_user],
      target_user_id: user.id,
      context: I18n.t("staff_action_logs.user_merged", username: source_username),
      email: source_email,
    ),
  )
end
BADGE_FIELDS ||= %i[
id
name
description
long_description
icon
image_upload_id
badge_type_id
badge_grouping_id
query
allow_title
multiple_grant
listable
target_posts
enabled
auto_revoke
show_posts
system
]
def log_badge_creation(badge)
raise Discourse::InvalidParameters.new(:badge) unless badge
details =
BADGE_FIELDS
.map { |f| [f, badge.public_send(f)] }
.select { |f, v| v.present? }
.map { |f, v| "#{f}: #{v}" }
UserHistory.create!(
params.merge(action: UserHistory.actions[:create_badge], details: details.join("\n")),
)
end
def log_badge_change(badge)
raise Discourse::InvalidParameters.new(:badge) unless badge
details = ["id: #{badge.id}"]
badge.previous_changes.each do |f, values|
details << "#{f}: #{values[1]}" if BADGE_FIELDS.include?(f.to_sym)
end
UserHistory.create!(
params.merge(action: UserHistory.actions[:change_badge], details: details.join("\n")),
)
end
def log_badge_deletion(badge)
raise Discourse::InvalidParameters.new(:badge) unless badge
details =
BADGE_FIELDS
.map { |f| [f, badge.public_send(f)] }
.select { |f, v| v.present? }
.map { |f, v| "#{f}: #{v}" }
UserHistory.create!(
params.merge(action: UserHistory.actions[:delete_badge], details: details.join("\n")),
)
end
def log_badge_grant(user_badge, opts = {})
raise Discourse::InvalidParameters.new(:user_badge) unless user_badge
UserHistory.create!(
params(opts).merge(
action: UserHistory.actions[:grant_badge],
target_user_id: user_badge.user_id,
details: user_badge.badge.name,
),
)
end
def log_badge_revoke(user_badge, opts = {})
raise Discourse::InvalidParameters.new(:user_badge) unless user_badge
UserHistory.create!(
params(opts).merge(
action: UserHistory.actions[:revoke_badge],
target_user_id: user_badge.user_id,
details: user_badge.badge.name,
),
)
end
def log_title_revoke(user, opts = {})
raise Discourse::InvalidParameters.new(:user) unless user
UserHistory.create!(
params(opts).merge(
action: UserHistory.actions[:revoke_title],
target_user_id: user.id,
details: opts[:revoke_reason],
previous_value: opts[:previous_value],
),
)
end
# Logs a change to a user's title, recording both old and new values from
# opts (:previous_value / :new_value).
# @raise [Discourse::InvalidParameters] if user is nil
def log_title_change(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:change_title],
      target_user_id: user.id,
      details: opts[:details],
      new_value: opts[:new_value],
      previous_value: opts[:previous_value],
    ),
  )
end
# Logs an override of an upload's secure status. The reason is a fixed
# localized string; the upload id comes from opts[:upload_id].
def log_change_upload_secure_status(opts = {})
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:override_upload_secure_status],
      details: [
        "upload_id: #{opts[:upload_id]}",
        "reason: #{I18n.t("uploads.marked_insecure_from_theme_component_reason")}",
      ].join("\n"),
      new_value: opts[:new_value],
    ),
  )
end
# Logs that a staff member viewed a single user's email address.
# @raise [Discourse::InvalidParameters] if user is nil
def log_check_email(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:check_email], target_user_id: user.id),
  )
end
# Logs a bulk email reveal as a single :check_email entry listing every
# user as "[id] username". Silently a no-op for nil/empty input.
def log_show_emails(users, opts = {})
  return if users.blank?
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:check_email],
      details: users.map { |u| "[#{u.id}] #{u.username}" }.join("\n"),
    ),
  )
end
# Logs that the acting admin impersonated the given user.
# @raise [Discourse::InvalidParameters] if user is nil
def log_impersonate(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:impersonate], target_user_id: user.id),
  )
end
# Logs a screened-IP roll-up: the resulting subnet plus the individual IPs
# that were collapsed into it.
def log_roll_up(subnet, ips, opts = {})
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:roll_up],
      details: "#{subnet} from #{ips.join(", ")}",
    ),
  )
end
# Logs one UserHistory row per changed category attribute, including a
# synthetic "permissions" entry (old/new maps serialized to JSON) and one
# "custom_fields[key]" entry per changed custom field.
#
# @param category [Category] the category that was just updated
# @param category_params [Hash] the update params; only keys present here
#   are read from category.previous_changes
# @param old_permissions [Hash, nil] permission map prior to the update
# @param old_custom_fields [Hash, nil] custom fields prior to the update
# @raise [Discourse::InvalidParameters] if category is not a Category
def log_category_settings_change(
  category,
  category_params,
  old_permissions: nil,
  old_custom_fields: nil
)
  validate_category(category)
  changed_attributes = category.previous_changes.slice(*category_params.keys)

  # present? instead of !old_permissions.empty?: the former raised
  # NoMethodError when the keyword was left at its default of nil.
  if old_permissions.present? && (old_permissions != category_params[:permissions])
    changed_attributes.merge!(
      permissions: [old_permissions.to_json, category_params[:permissions].to_json],
    )
  end

  if old_custom_fields && category_params[:custom_fields]
    category_params[:custom_fields].each do |key, value|
      next if old_custom_fields[key] == value # unchanged fields are not logged
      changed_attributes["custom_fields[#{key}]"] = [old_custom_fields[key], value]
    end
  end

  changed_attributes.each do |key, value|
    UserHistory.create!(
      params.merge(
        action: UserHistory.actions[:change_category_settings],
        category_id: category.id,
        context: category.url,
        subject: key,
        previous_value: value[0],
        new_value: value[1],
      ),
    )
  end
end
# Logs deletion of a category, snapshotting its name, creation time,
# permissions and (when present) its parent category's name.
# @raise [Discourse::InvalidParameters] if category is not a Category
def log_category_deletion(category)
  validate_category(category)
  details = [
    "created_at: #{category.created_at}",
    "name: #{category.name}",
    "permissions: #{category.permissions_params}",
  ]
  # intentional assignment-in-condition: only record a parent if one exists
  if parent_category = category.parent_category
    details << "parent_category: #{parent_category.name}"
  end
  UserHistory.create!(
    params.merge(
      action: UserHistory.actions[:delete_category],
      category_id: category.id,
      details: details.join("\n"),
      context: category.url,
    ),
  )
end
# Logs creation of a category with its name and creation time.
# @raise [Discourse::InvalidParameters] if category is not a Category
def log_category_creation(category)
  validate_category(category)
  details = ["created_at: #{category.created_at}", "name: #{category.name}"]
  UserHistory.create!(
    params.merge(
      action: UserHistory.actions[:create_category],
      details: details.join("\n"),
      category_id: category.id,
      context: category.url,
    ),
  )
end
# Logs that a user was silenced; attaches the triggering post via
# opts[:post_id] when one is given.
# @raise [Discourse::InvalidParameters] if user is nil
def log_silence_user(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  create_args =
    params(opts).merge(
      action: UserHistory.actions[:silence_user],
      target_user_id: user.id,
      details: opts[:details],
    )
  create_args[:post_id] = opts[:post_id] if opts[:post_id]
  UserHistory.create!(create_args)
end
# Logs that a user was unsilenced.
# @raise [Discourse::InvalidParameters] if user is nil
def log_unsilence_user(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:unsilence_user], target_user_id: user.id),
  )
end
# Logs that a user's second-factor authentication was disabled by staff.
# @raise [Discourse::InvalidParameters] if user is nil
def log_disable_second_factor_auth(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:disabled_second_factor],
      target_user_id: user.id,
    ),
  )
end
# Logs that admin rights were granted to a user.
# @raise [Discourse::InvalidParameters] if user is nil
def log_grant_admin(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:grant_admin], target_user_id: user.id),
  )
end
# Logs that admin rights were revoked from a user.
# @raise [Discourse::InvalidParameters] if user is nil
def log_revoke_admin(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:revoke_admin], target_user_id: user.id),
  )
end
# Logs that moderation rights were granted to a user.
# @raise [Discourse::InvalidParameters] if user is nil
def log_grant_moderation(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:grant_moderation], target_user_id: user.id),
  )
end
# Logs that moderation rights were revoked from a user.
# @raise [Discourse::InvalidParameters] if user is nil
def log_revoke_moderation(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:revoke_moderation], target_user_id: user.id),
  )
end
# Logs that a site backup was started, stamping the admin's IP address.
def log_backup_create(opts = {})
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:backup_create],
      ip_address: @admin.ip_address.to_s,
    ),
  )
end
# Logs an export of a named entity (e.g. a CSV report); the entity name is
# stored as the entry's subject, alongside the admin's IP address.
def log_entity_export(entity, opts = {})
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:entity_export],
      ip_address: @admin.ip_address.to_s,
      subject: entity,
    ),
  )
end
# Logs a download of a backup file (filename recorded in details).
# @raise [Discourse::InvalidParameters] if backup is nil
def log_backup_download(backup, opts = {})
  raise Discourse::InvalidParameters.new(:backup) unless backup
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:backup_download],
      ip_address: @admin.ip_address.to_s,
      details: backup.filename,
    ),
  )
end
# Logs deletion of a backup file (filename recorded in details).
# @raise [Discourse::InvalidParameters] if backup is nil
def log_backup_destroy(backup, opts = {})
  raise Discourse::InvalidParameters.new(:backup) unless backup
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:backup_destroy],
      ip_address: @admin.ip_address.to_s,
      details: backup.filename,
    ),
  )
end
# Logs that a user's email was revoked, with the staff-supplied reason.
# @raise [Discourse::InvalidParameters] if user is nil (guard added for
#   consistency with the other user-targeting log methods, which all
#   raise InvalidParameters instead of failing on `nil.id`)
def log_revoke_email(user, reason, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:revoke_email],
      target_user_id: user.id,
      details: reason,
    ),
  )
end
# Logs staff approval of a pending user.
def log_user_approve(user, opts = {})
  entry = params(opts)
  entry = entry.merge(action: UserHistory.actions[:approve_user], target_user_id: user.id)
  UserHistory.create!(entry)
end
# Logs deactivation of a user account with the staff-supplied reason.
# @raise [Discourse::InvalidParameters] if user is nil
def log_user_deactivate(user, reason, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:deactivate_user],
      target_user_id: user.id,
      details: reason,
    ),
  )
end
# Logs activation of a user account with the staff-supplied reason.
# @raise [Discourse::InvalidParameters] if user is nil
def log_user_activate(user, reason, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:activate_user],
      target_user_id: user.id,
      details: reason,
    ),
  )
end
# Logs completion of a setup-wizard step (step id stored as context).
# @raise [Discourse::InvalidParameters] if step is nil
def log_wizard_step(step, opts = {})
  raise Discourse::InvalidParameters.new(:step) unless step
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:wizard_step], context: step.id),
  )
end
# Logs toggling of site-wide readonly mode; the previous value is inferred
# as the boolean negation of the new state.
def log_change_readonly_mode(state)
  UserHistory.create!(
    params.merge(
      action: UserHistory.actions[:change_readonly_mode],
      previous_value: !state,
      new_value: state,
    ),
  )
end
# Logs that a staff member viewed a personal message.
# @raise [Discourse::InvalidParameters] unless topic is a Topic
def log_check_personal_message(topic, opts = {})
  raise Discourse::InvalidParameters.new(:topic) unless topic && topic.is_a?(Topic)
  UserHistory.create!(
    params(opts).merge(
      action: UserHistory.actions[:check_personal_message],
      topic_id: topic.id,
      context: topic.relative_url,
    ),
  )
end
# Logs approval of a queued post.
# @raise [Discourse::InvalidParameters] unless post is a Post
def log_post_approved(post, opts = {})
  raise Discourse::InvalidParameters.new(:post) unless post.is_a?(Post)
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:post_approved], post_id: post.id),
  )
end
# Logs rejection of a queued post. Snapshots topic title, author and raw
# content into details, falling back to i18n placeholders when the topic
# or author has since been deleted.
# @raise [Discourse::InvalidParameters] unless reviewable is a Reviewable
def log_post_rejected(reviewable, rejected_at, opts = {})
  raise Discourse::InvalidParameters.new(:rejected_post) unless reviewable.is_a?(Reviewable)
  # with_deleted lookup so rejections of posts in deleted topics still log
  topic = reviewable.topic || Topic.with_deleted.find_by(id: reviewable.topic_id)
  topic_title = topic&.title || I18n.t("staff_action_logs.not_found")
  username = reviewable.target_created_by&.username || I18n.t("staff_action_logs.unknown")
  name = reviewable.target_created_by&.name || I18n.t("staff_action_logs.unknown")
  details = [
    "created_at: #{reviewable.created_at}",
    "rejected_at: #{rejected_at}",
    "user: #{username} (#{name})",
    "topic: #{topic_title}",
    "raw: #{reviewable.payload["raw"]}",
  ]
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:post_rejected], details: details.join("\n")),
  )
end
# Logs a webhook create/update with the given action id; old/new values
# come from opts[:changes] (an ActiveModel-style changes hash).
def log_web_hook(web_hook, action, opts = {})
  details = ["webhook_id: #{web_hook.id}", "payload_url: #{web_hook.payload_url}"]
  old_values, new_values = get_changes(opts[:changes])
  UserHistory.create!(
    params(opts).merge(
      action: action,
      context: details.join(", "),
      previous_value: old_values&.join(", "),
      new_value: new_values&.join(", "),
    ),
  )
end
# Logs automatic deactivation of a webhook after a failing HTTP response.
# NOTE(review): unlike the sibling methods, `context` is passed as an
# Array rather than a joined String here — presumably coerced on save;
# verify against the UserHistory column type.
def log_web_hook_deactivate(web_hook, response_http_status, opts = {})
  context = ["webhook_id: #{web_hook.id}", "webhook_response_status: #{response_http_status}"]
  UserHistory.create!(
    params.merge(
      action: UserHistory.actions[:web_hook_deactivate],
      context: context,
      details:
        I18n.t("staff_action_logs.webhook_deactivation_reason", status: response_http_status),
    ),
  )
end
# Logs a change to an embeddable host with the given action id; old/new
# values come from opts[:changes].
def log_embeddable_host(embeddable_host, action, opts = {})
  old_values, new_values = get_changes(opts[:changes])
  UserHistory.create!(
    params(opts).merge(
      action: action,
      context: "host: #{embeddable_host.host}",
      previous_value: old_values&.join(", "),
      new_value: new_values&.join(", "),
    ),
  )
end
# Logs an API key create/update. The secret key itself is stripped from
# the change set so it never reaches the audit log; only the truncated key
# is stored as the subject.
def log_api_key(api_key, action, opts = {})
  opts[:changes]&.delete("key") # Do not log the full key
  history_params = params(opts).merge(action: action, subject: api_key.truncated_key)
  if opts[:changes]
    old_values, new_values = get_changes(opts[:changes])
    # an "id" change means the record was just created, so there is no
    # meaningful previous value to record
    history_params[:previous_value] = old_values&.join(", ") unless opts[:changes].keys.include?(
      "id",
    )
    history_params[:new_value] = new_values&.join(", ")
  end
  UserHistory.create!(history_params)
end
# Logs revocation of an API key (identified by its truncated key).
def log_api_key_revoke(api_key)
  UserHistory.create!(
    params.merge(
      subject: api_key.truncated_key,
      action: UserHistory.actions[:api_key_update],
      details: I18n.t("staff_action_logs.api_key.revoked"),
    ),
  )
end
# Logs restoration of a previously revoked API key.
def log_api_key_restore(api_key)
  UserHistory.create!(
    params.merge(
      subject: api_key.truncated_key,
      action: UserHistory.actions[:api_key_update],
      details: I18n.t("staff_action_logs.api_key.restored"),
    ),
  )
end
# Logs publishing of a topic as a public page (slug stored as subject).
def log_published_page(topic_id, slug)
  UserHistory.create!(
    params.merge(subject: slug, topic_id: topic_id, action: UserHistory.actions[:page_published]),
  )
end
# Logs unpublishing of a topic's public page (slug stored as subject).
def log_unpublished_page(topic_id, slug)
  UserHistory.create!(
    params.merge(
      subject: slug,
      topic_id: topic_id,
      action: UserHistory.actions[:page_unpublished],
    ),
  )
end
# Logs that an email address was added to a user. Built directly (not via
# params) since there is no context/details to carry.
# @raise [Discourse::InvalidParameters] if user is nil
def log_add_email(user)
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    action: UserHistory.actions[:add_email],
    acting_user_id: @admin.id,
    target_user_id: user.id,
  )
end
# Logs that a user's email address was updated.
# @raise [Discourse::InvalidParameters] if user is nil
def log_update_email(user)
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    action: UserHistory.actions[:update_email],
    acting_user_id: @admin.id,
    target_user_id: user.id,
  )
end
# Logs that an email address was removed from a user.
# @raise [Discourse::InvalidParameters] if user is nil
def log_destroy_email(user)
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    action: UserHistory.actions[:destroy_email],
    acting_user_id: @admin.id,
    target_user_id: user.id,
  )
end
# Logs creation of a watched word; context carries the watched-word action
# id (block/censor/etc.).
# @raise [Discourse::InvalidParameters] if watched_word is nil
def log_watched_words_creation(watched_word)
  raise Discourse::InvalidParameters.new(:watched_word) unless watched_word
  UserHistory.create!(
    action: UserHistory.actions[:watched_word_create],
    acting_user_id: @admin.id,
    details: watched_word.action_log_details,
    context: WatchedWord.actions[watched_word.action],
  )
end
# Logs deletion of a watched word (mirror of log_watched_words_creation).
# @raise [Discourse::InvalidParameters] if watched_word is nil
def log_watched_words_deletion(watched_word)
  raise Discourse::InvalidParameters.new(:watched_word) unless watched_word
  UserHistory.create!(
    action: UserHistory.actions[:watched_word_destroy],
    acting_user_id: @admin.id,
    details: watched_word.action_log_details,
    context: WatchedWord.actions[watched_word.action],
  )
end
# Logs deletion of a group, snapshotting its name, id and (when set) the
# trust level it granted.
# @raise [Discourse::InvalidParameters] if group is nil
def log_group_deletion(group)
  raise Discourse::InvalidParameters.new(:group) if group.nil?
  details = ["name: #{group.name}", "id: #{group.id}"]
  details << "grant_trust_level: #{group.grant_trust_level}" if group.grant_trust_level
  UserHistory.create!(
    acting_user_id: @admin.id,
    action: UserHistory.actions[:delete_group],
    details: details.join(", "),
  )
end
# Logs permanent deletion of a post's revision history.
# @raise [Discourse::InvalidParameters] if post is nil
def log_permanently_delete_post_revisions(post)
  raise Discourse::InvalidParameters.new(:post) if post.nil?
  UserHistory.create!(
    action: UserHistory.actions[:permanently_delete_post_revisions],
    acting_user_id: @admin.id,
    post_id: post.id,
  )
end
# Logs creation of a public sidebar section; details list its links.
def log_create_public_sidebar_section(section)
  UserHistory.create!(
    action: UserHistory.actions[:create_public_sidebar_section],
    acting_user_id: @admin.id,
    subject: section.title,
    details: custom_section_details(section),
  )
end
# Logs an update to a public sidebar section; details list its links.
def log_update_public_sidebar_section(section)
  UserHistory.create!(
    action: UserHistory.actions[:update_public_sidebar_section],
    acting_user_id: @admin.id,
    subject: section.title,
    details: custom_section_details(section),
  )
end
# Logs deletion of a public sidebar section (title only; the links are
# gone with the record).
def log_destroy_public_sidebar_section(section)
  UserHistory.create!(
    action: UserHistory.actions[:destroy_public_sidebar_section],
    acting_user_id: @admin.id,
    subject: section.title,
  )
end
# Logs a reset of a user's email bounce score.
# @raise [Discourse::InvalidParameters] if user is nil
def log_reset_bounce_score(user, opts = {})
  raise Discourse::InvalidParameters.new(:user) unless user
  UserHistory.create!(
    params(opts).merge(action: UserHistory.actions[:reset_bounce_score], target_user_id: user.id),
  )
end
private
# Splits an ActiveModel-style changes hash ({ attr => [old, new] }) into a
# pair of parallel "attr: value" arrays, sorted by attribute name. The
# "updated_at" bookkeeping key is removed (mutates the argument). Returns
# nil when changes is nil/false.
def get_changes(changes)
  return unless changes

  changes.delete("updated_at")

  sorted = changes.sort_by { |attr, _| attr.to_s }
  before = sorted.map { |attr, values| "#{attr}: #{values[0]}" }
  after = sorted.map { |attr, values| "#{attr}: #{values[1]}" }

  [before, after]
end
# Base attributes shared by every UserHistory entry: the acting admin plus
# optional :context / :details lifted from opts.
def params(opts = nil)
  opts ||= {}
  { acting_user_id: @admin.id, context: opts[:context], details: opts[:details] }
end
# Ensures the argument is a Category before it is logged against.
# @raise [Discourse::InvalidParameters] otherwise
def validate_category(category)
  # nil.is_a?(Category) is already false, so the former `category &&`
  # pre-check was redundant.
  raise Discourse::InvalidParameters.new(:category) unless category.is_a?(Category)
end
# Renders a sidebar section's links as a single "links: name - value, ..."
# summary string for the audit log.
def custom_section_details(section)
  rendered_links =
    section.sidebar_urls.map { |sidebar_url| "#{sidebar_url.name} - #{sidebar_url.value}" }
  "links: #{rendered_links.join(", ")}"
end
end
``` | # frozen_string_literal: true
RSpec.describe StaffActionLogger do
fab!(:admin)
let(:logger) { described_class.new(admin) }
describe "new" do
it "raises an error when user is nil" do
expect { described_class.new(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "raises an error when user is not a User" do
expect { described_class.new(5) }.to raise_error(Discourse::InvalidParameters)
end
end
describe "log_user_deletion" do
subject(:log_user_deletion) { described_class.new(admin).log_user_deletion(deleted_user) }
fab!(:deleted_user) { Fabricate(:user) }
it "raises an error when user is nil" do
expect { logger.log_user_deletion(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "raises an error when user is not a User" do
expect { logger.log_user_deletion(1) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
expect { log_user_deletion }.to change { UserHistory.count }.by(1)
end
end
describe "log_show_emails" do
it "logs the user history" do
expect { logger.log_show_emails([admin]) }.to change(UserHistory, :count).by(1)
end
it "doesn't raise an exception with nothing to log" do
expect { logger.log_show_emails([]) }.not_to raise_error
end
it "doesn't raise an exception with nil input" do
expect { logger.log_show_emails(nil) }.not_to raise_error
end
end
describe "log_post_deletion" do
subject(:log_post_deletion) { described_class.new(admin).log_post_deletion(deleted_post) }
fab!(:deleted_post) { Fabricate(:post) }
it "raises an error when post is nil" do
expect { logger.log_post_deletion(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "raises an error when post is not a Post" do
expect { logger.log_post_deletion(1) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
expect { log_post_deletion }.to change { UserHistory.count }.by(1)
end
it "does not explode if post does not have a user" do
expect {
deleted_post.update_columns(user_id: nil)
log_post_deletion
}.to change { UserHistory.count }.by(1)
end
end
describe "log_topic_delete_recover" do
fab!(:topic)
context "when deleting topic" do
subject(:log_topic_delete_recover) do
described_class.new(admin).log_topic_delete_recover(topic)
end
it "raises an error when topic is nil" do
expect { logger.log_topic_delete_recover(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "raises an error when topic is not a Topic" do
expect { logger.log_topic_delete_recover(1) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
expect { log_topic_delete_recover }.to change { UserHistory.count }.by(1)
end
end
context "when recovering topic" do
subject(:log_topic_delete_recover) do
described_class.new(admin).log_topic_delete_recover(topic, "recover_topic")
end
it "raises an error when topic is nil" do
expect { logger.log_topic_delete_recover(nil, "recover_topic") }.to raise_error(
Discourse::InvalidParameters,
)
end
it "raises an error when topic is not a Topic" do
expect { logger.log_topic_delete_recover(1, "recover_topic") }.to raise_error(
Discourse::InvalidParameters,
)
end
it "creates a new UserHistory record" do
expect { log_topic_delete_recover }.to change { UserHistory.count }.by(1)
end
end
end
describe "log_trust_level_change" do
subject(:log_trust_level_change) do
described_class.new(admin).log_trust_level_change(user, old_trust_level, new_trust_level)
end
fab!(:user)
let(:old_trust_level) { TrustLevel[0] }
let(:new_trust_level) { TrustLevel[1] }
it "raises an error when user or trust level is nil" do
expect {
logger.log_trust_level_change(nil, old_trust_level, new_trust_level)
}.to raise_error(Discourse::InvalidParameters)
expect { logger.log_trust_level_change(user, nil, new_trust_level) }.to raise_error(
Discourse::InvalidParameters,
)
expect { logger.log_trust_level_change(user, old_trust_level, nil) }.to raise_error(
Discourse::InvalidParameters,
)
end
it "raises an error when user is not a User" do
expect { logger.log_trust_level_change(1, old_trust_level, new_trust_level) }.to raise_error(
Discourse::InvalidParameters,
)
end
it "raises an error when new trust level is not a Trust Level" do
max_level = TrustLevel.valid_range.max
expect { logger.log_trust_level_change(user, old_trust_level, max_level + 1) }.to raise_error(
Discourse::InvalidParameters,
)
end
it "creates a new UserHistory record" do
expect { log_trust_level_change }.to change { UserHistory.count }.by(1)
expect(UserHistory.last.previous_value).to eq(old_trust_level.to_s)
expect(UserHistory.last.new_value).to eq(new_trust_level.to_s)
end
end
describe "log_site_setting_change" do
it "raises an error when params are invalid" do
expect { logger.log_site_setting_change(nil, "1", "2") }.to raise_error(
Discourse::InvalidParameters,
)
expect {
logger.log_site_setting_change("i_am_a_site_setting_that_will_never_exist", "1", "2")
}.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
expect { logger.log_site_setting_change("title", "Discourse", "My Site") }.to change {
UserHistory.count
}.by(1)
end
it "logs boolean values" do
log_record = logger.log_site_setting_change("allow_user_locale", true, false)
expect(log_record.previous_value).to eq("true")
expect(log_record.new_value).to eq("false")
end
it "logs nil values" do
log_record = logger.log_site_setting_change("title", nil, nil)
expect(log_record.previous_value).to be_nil
expect(log_record.new_value).to be_nil
end
end
describe "log_theme_change" do
it "raises an error when params are invalid" do
expect { logger.log_theme_change(nil, nil) }.to raise_error(Discourse::InvalidParameters)
end
let! :theme do
Fabricate(:theme)
end
it "logs new site customizations" do
log_record = logger.log_theme_change(nil, theme)
expect(log_record.subject).to eq(theme.name)
expect(log_record.previous_value).to eq(nil)
expect(log_record.new_value).to be_present
json = ::JSON.parse(log_record.new_value)
expect(json["name"]).to eq(theme.name)
end
it "logs updated site customizations" do
old_json = ThemeSerializer.new(theme, root: false).to_json
theme.set_field(target: :common, name: :scss, value: "body{margin: 10px;}")
log_record = logger.log_theme_change(old_json, theme)
expect(log_record.previous_value).to be_present
json = ::JSON.parse(log_record.new_value)
expect(json["theme_fields"]).to eq(
[
{
"name" => "scss",
"target" => "common",
"value" => "body{margin: 10px;}",
"type_id" => 1,
},
],
)
end
end
describe "log_theme_destroy" do
it "raises an error when params are invalid" do
expect { logger.log_theme_destroy(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
theme = Fabricate(:theme)
theme.set_field(target: :common, name: :scss, value: "body{margin: 10px;}")
log_record = logger.log_theme_destroy(theme)
expect(log_record.previous_value).to be_present
expect(log_record.new_value).to eq(nil)
json = ::JSON.parse(log_record.previous_value)
expect(json["theme_fields"]).to eq(
[
{
"name" => "scss",
"target" => "common",
"value" => "body{margin: 10px;}",
"type_id" => 1,
},
],
)
end
end
describe "log_theme_setting_change" do
it "raises an error when params are invalid" do
expect { logger.log_theme_setting_change(nil, nil, nil, nil) }.to raise_error(
Discourse::InvalidParameters,
)
end
let! :theme do
Fabricate(:theme)
end
before do
theme.set_field(target: :settings, name: :yaml, value: "custom_setting: special")
theme.save!
end
it "raises an error when theme setting is invalid" do
expect {
logger.log_theme_setting_change(:inexistent_setting, nil, nil, theme)
}.to raise_error(Discourse::InvalidParameters)
end
it "logs theme setting changes" do
log_record =
logger.log_theme_setting_change(:custom_setting, "special", "notsospecial", theme)
expect(log_record.subject).to eq("#{theme.name}: custom_setting")
expect(log_record.previous_value).to eq("special")
expect(log_record.new_value).to eq("notsospecial")
end
end
describe "log_site_text_change" do
it "raises an error when params are invalid" do
expect { logger.log_site_text_change(nil, "new text", "old text") }.to raise_error(
Discourse::InvalidParameters,
)
end
it "creates a new UserHistory record" do
expect { logger.log_site_text_change("created", "new text", "old text") }.to change {
UserHistory.count
}.by(1)
end
end
describe "log_user_suspend" do
fab!(:user) { Fabricate(:user, suspended_at: 10.minutes.ago, suspended_till: 1.day.from_now) }
it "raises an error when arguments are missing" do
expect { logger.log_user_suspend(nil, nil) }.to raise_error(Discourse::InvalidParameters)
expect { logger.log_user_suspend(nil, "He was bad.") }.to raise_error(
Discourse::InvalidParameters,
)
end
it "reason arg is optional" do
expect { logger.log_user_suspend(user, nil) }.to_not raise_error
end
it "creates a new UserHistory record" do
reason = "He was a big meanie."
log_record = logger.log_user_suspend(user, reason)
expect(log_record).to be_valid
expect(log_record.details).to eq(reason)
expect(log_record.target_user).to eq(user)
end
end
describe "log_user_unsuspend" do
fab!(:user) { Fabricate(:user, suspended_at: 1.day.ago, suspended_till: 7.days.from_now) }
it "raises an error when argument is missing" do
expect { logger.log_user_unsuspend(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
log_record = logger.log_user_unsuspend(user)
expect(log_record).to be_valid
expect(log_record.target_user).to eq(user)
end
end
describe "log_badge_grant" do
let(:user) { Fabricate(:user) }
let(:badge) { Fabricate(:badge) }
let(:user_badge) { BadgeGranter.grant(badge, user) }
it "raises an error when argument is missing" do
expect { logger.log_badge_grant(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
log_record = logger.log_badge_grant(user_badge)
expect(log_record).to be_valid
expect(log_record.target_user).to eq(user)
expect(log_record.details).to eq(badge.name)
end
end
describe "log_badge_revoke" do
fab!(:user)
fab!(:badge)
let(:user_badge) { BadgeGranter.grant(badge, user) }
it "raises an error when argument is missing" do
expect { logger.log_badge_revoke(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
log_record = logger.log_badge_revoke(user_badge)
expect(log_record).to be_valid
expect(log_record.target_user).to eq(user)
expect(log_record.details).to eq(badge.name)
end
end
describe "log_roll_up" do
subject(:log_roll_up) { described_class.new(admin).log_roll_up(subnet, ips) }
let(:subnet) { "1.2.3.0/24" }
let(:ips) { %w[1.2.3.4 1.2.3.100] }
it "creates a new UserHistory record" do
log_record = logger.log_roll_up(subnet, ips)
expect(log_record).to be_valid
expect(log_record.details).to eq("#{subnet} from #{ips.join(", ")}")
end
end
describe "log_custom" do
it "raises an error when `custom_type` is missing" do
expect { logger.log_custom(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates the UserHistory record" do
logged =
logger.log_custom("clicked_something", evil: "trout", clicked_on: "thing", topic_id: 1234)
expect(logged).to be_valid
expect(logged.details).to eq("evil: trout\nclicked_on: thing")
expect(logged.action).to eq(UserHistory.actions[:custom_staff])
expect(logged.custom_type).to eq("clicked_something")
expect(logged.topic_id).to be === 1234
end
end
describe "log_category_settings_change" do
let(:category) { Fabricate(:category, name: "haha") }
let(:category_group) { Fabricate(:category_group, category: category, permission_type: 1) }
it "raises an error when category is missing" do
expect { logger.log_category_settings_change(nil, nil) }.to raise_error(
Discourse::InvalidParameters,
)
end
it "creates new UserHistory records" do
attributes = { name: "new_name", permissions: { category_group.group_name => 2 } }
category.update!(attributes)
logger.log_category_settings_change(
category,
attributes,
old_permissions: {
category_group.group_name => category_group.permission_type,
},
)
expect(UserHistory.count).to eq(2)
permission_user_history = UserHistory.find_by_subject("permissions")
expect(permission_user_history.category_id).to eq(category.id)
expect(permission_user_history.previous_value).to eq(
{ category_group.group_name => 1 }.to_json,
)
expect(permission_user_history.new_value).to eq({ category_group.group_name => 2 }.to_json)
expect(permission_user_history.action).to eq(UserHistory.actions[:change_category_settings])
expect(permission_user_history.context).to eq(category.url)
name_user_history = UserHistory.find_by_subject("name")
expect(name_user_history.category).to eq(category)
expect(name_user_history.previous_value).to eq("haha")
expect(name_user_history.new_value).to eq("new_name")
end
it "logs permissions changes even if the category is visible to everyone" do
attributes = { name: "new_name" }
old_permission = { "everyone" => 1 }
category.update!(attributes)
logger.log_category_settings_change(
category,
attributes.merge(permissions: { "trust_level_3" => 1 }),
old_permissions: old_permission,
)
expect(UserHistory.count).to eq(2)
expect(UserHistory.find_by_subject("name").category).to eq(category)
end
it "logs custom fields changes" do
attributes = { custom_fields: { "auto_populated" => "t" } }
category.update!(attributes)
logger.log_category_settings_change(
category,
attributes,
old_permissions: category.permissions_params,
old_custom_fields: {
},
)
expect(UserHistory.count).to eq(1)
end
it "does not log custom fields changes if value is unchanged" do
attributes = { custom_fields: { "auto_populated" => "t" } }
category.update!(attributes)
logger.log_category_settings_change(
category,
attributes,
old_permissions: category.permissions_params,
old_custom_fields: {
"auto_populated" => "t",
},
)
expect(UserHistory.count).to eq(0)
end
end
describe "log_category_deletion" do
fab!(:parent_category) { Fabricate(:category) }
fab!(:category) { Fabricate(:category, parent_category: parent_category) }
it "raises an error when category is missing" do
expect { logger.log_category_deletion(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
logger.log_category_deletion(category)
expect(UserHistory.count).to eq(1)
user_history = UserHistory.last
expect(user_history.subject).to eq(nil)
expect(user_history.category).to eq(category)
expect(user_history.details).to include("parent_category: #{parent_category.name}")
expect(user_history.context).to eq(category.url)
expect(user_history.action).to eq(UserHistory.actions[:delete_category])
end
end
describe "log_category_creation" do
fab!(:category)
it "raises an error when category is missing" do
expect { logger.log_category_deletion(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
logger.log_category_creation(category)
expect(UserHistory.count).to eq(1)
user_history = UserHistory.last
expect(user_history.category).to eq(category)
expect(user_history.context).to eq(category.url)
expect(user_history.action).to eq(UserHistory.actions[:create_category])
end
end
describe "log_lock_trust_level" do
fab!(:user)
it "raises an error when argument is missing" do
expect { logger.log_lock_trust_level(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
user.manual_locked_trust_level = 3
expect { logger.log_lock_trust_level(user) }.to change { UserHistory.count }.by(1)
user_history = UserHistory.last
expect(user_history.action).to eq(UserHistory.actions[:lock_trust_level])
user.manual_locked_trust_level = nil
expect { logger.log_lock_trust_level(user) }.to change { UserHistory.count }.by(1)
user_history = UserHistory.last
expect(user_history.action).to eq(UserHistory.actions[:unlock_trust_level])
end
end
describe "log_user_activate" do
fab!(:user)
it "raises an error when argument is missing" do
expect { logger.log_user_activate(nil, nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
reason = "Staff activated from admin"
expect { logger.log_user_activate(user, reason) }.to change { UserHistory.count }.by(1)
user_history = UserHistory.last
expect(user_history.action).to eq(UserHistory.actions[:activate_user])
expect(user_history.details).to eq(reason)
end
end
describe "#log_readonly_mode" do
it "creates a new record" do
expect { logger.log_change_readonly_mode(true) }.to change { UserHistory.count }.by(1)
user_history = UserHistory.last
expect(user_history.action).to eq(UserHistory.actions[:change_readonly_mode])
expect(user_history.new_value).to eq("t")
expect(user_history.previous_value).to eq("f")
expect { logger.log_change_readonly_mode(false) }.to change { UserHistory.count }.by(1)
user_history = UserHistory.last
expect(user_history.action).to eq(UserHistory.actions[:change_readonly_mode])
expect(user_history.new_value).to eq("f")
expect(user_history.previous_value).to eq("t")
end
end
# Examples for StaffActionLogger#log_check_personal_message: argument validation
# plus the happy path that records a UserHistory row.
describe "log_check_personal_message" do
subject(:log_check_personal_message) do
described_class.new(admin).log_check_personal_message(personal_message)
end
fab!(:personal_message) { Fabricate(:private_message_topic) }
it "raises an error when topic is nil" do
expect { logger.log_check_personal_message(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "raises an error when topic is not a Topic" do
expect { logger.log_check_personal_message(1) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
expect { log_check_personal_message }.to change { UserHistory.count }.by(1)
end
end
# Examples for StaffActionLogger#log_post_approved: same validation/record pattern.
describe "log_post_approved" do
subject(:log_post_approved) { described_class.new(admin).log_post_approved(approved_post) }
fab!(:approved_post) { Fabricate(:post) }
it "raises an error when post is nil" do
expect { logger.log_post_approved(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "raises an error when post is not a Post" do
expect { logger.log_post_approved(1) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
expect { log_post_approved }.to change { UserHistory.count }.by(1)
end
end
# Examples for StaffActionLogger#log_post_rejected, including the case where the
# rejected post's author has since been deleted.
describe "log_post_rejected" do
subject(:log_post_rejected) do
described_class.new(admin).log_post_rejected(reviewable, DateTime.now)
end
fab!(:reviewable) { Fabricate(:reviewable_queued_post) }
it "raises an error when reviewable not supplied" do
expect { logger.log_post_rejected(nil, DateTime.now) }.to raise_error(
Discourse::InvalidParameters,
)
expect { logger.log_post_rejected(1, DateTime.now) }.to raise_error(
Discourse::InvalidParameters,
)
end
it "creates a new UserHistory record" do
expect { log_post_rejected }.to change { UserHistory.count }.by(1)
user_history = UserHistory.last
expect(user_history.action).to eq(UserHistory.actions[:post_rejected])
expect(user_history.details).to include(reviewable.payload["raw"])
end
# The log entry must still be written when the reviewable's creator is gone.
it "works if the user was destroyed" do
reviewable.created_by.destroy
reviewable.reload
expect { log_post_rejected }.to change { UserHistory.count }.by(1)
user_history = UserHistory.last
expect(user_history.action).to eq(UserHistory.actions[:post_rejected])
expect(user_history.details).to include(reviewable.payload["raw"])
end
end
# Examples for StaffActionLogger#log_topic_closed: one method logs either
# topic_closed or topic_opened depending on the closed: flag.
describe "log_topic_closed" do
fab!(:topic)
it "raises an error when argument is missing" do
expect { logger.log_topic_closed(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
expect { logger.log_topic_closed(topic, closed: true) }.to change {
UserHistory.where(action: UserHistory.actions[:topic_closed]).count
}.by(1)
expect { logger.log_topic_closed(topic, closed: false) }.to change {
UserHistory.where(action: UserHistory.actions[:topic_opened]).count
}.by(1)
end
end
# Examples for StaffActionLogger#log_topic_archived: mirrors log_topic_closed,
# logging topic_archived or topic_unarchived based on the archived: flag.
describe "log_topic_archived" do
fab!(:topic)
it "raises an error when argument is missing" do
expect { logger.log_topic_archived(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
expect { logger.log_topic_archived(topic, archived: true) }.to change {
UserHistory.where(action: UserHistory.actions[:topic_archived]).count
}.by(1)
expect { logger.log_topic_archived(topic, archived: false) }.to change {
UserHistory.where(action: UserHistory.actions[:topic_unarchived]).count
}.by(1)
end
end
# Examples for StaffActionLogger#log_post_staff_note: creating a note logs
# post_staff_note_create; clearing it logs post_staff_note_destroy.
describe "log_post_staff_note" do
fab!(:post)
# Bug fix: this example previously called logger.log_topic_archived(nil)
# (copy/paste from the describe above), so log_post_staff_note's nil-argument
# validation was never exercised. NOTE(review): assumes log_post_staff_note
# raises Discourse::InvalidParameters for a nil post like its sibling logger
# methods do — confirm against StaffActionLogger.
it "raises an error when argument is missing" do
expect { logger.log_post_staff_note(nil, {}) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
expect {
logger.log_post_staff_note(post, { new_value: "my note", old_value: nil })
}.to change { UserHistory.count }.by(1)
user_history = UserHistory.last
expect(user_history.action).to eq(UserHistory.actions[:post_staff_note_create])
expect(user_history.new_value).to eq("my note")
expect(user_history.previous_value).to eq(nil)
expect {
logger.log_post_staff_note(post, { new_value: nil, old_value: "my note" })
}.to change { UserHistory.count }.by(1)
user_history = UserHistory.last
expect(user_history.action).to eq(UserHistory.actions[:post_staff_note_destroy])
expect(user_history.new_value).to eq(nil)
expect(user_history.previous_value).to eq("my note")
end
end
# Examples for StaffActionLogger#log_watched_words_creation: the word and its
# action name ("block") are captured on the UserHistory row; subject stays nil.
describe "#log_watched_words_creation" do
fab!(:watched_word) { Fabricate(:watched_word, action: WatchedWord.actions[:block]) }
it "raises an error when watched_word is missing" do
expect { logger.log_watched_words_creation(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
logger.log_watched_words_creation(watched_word)
expect(UserHistory.count).to eq(1)
user_history = UserHistory.last
expect(user_history.subject).to eq(nil)
expect(user_history.details).to include(watched_word.word)
expect(user_history.context).to eq("block")
expect(user_history.action).to eq(UserHistory.actions[:watched_word_create])
end
end
# Examples for StaffActionLogger#log_watched_words_deletion: same shape as the
# creation examples, recording the watched_word_destroy action instead.
describe "#log_watched_words_deletion" do
fab!(:watched_word) { Fabricate(:watched_word, action: WatchedWord.actions[:block]) }
it "raises an error when watched_word is missing" do
expect { logger.log_watched_words_deletion(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "creates a new UserHistory record" do
logger.log_watched_words_deletion(watched_word)
expect(UserHistory.count).to eq(1)
user_history = UserHistory.last
expect(user_history.subject).to eq(nil)
expect(user_history.details).to include(watched_word.word)
expect(user_history.context).to eq("block")
expect(user_history.action).to eq(UserHistory.actions[:watched_word_destroy])
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
require "net/imap"
require "net/smtp"
require "net/pop"
# Usage:
#
# begin
# EmailSettingsValidator.validate_imap(host: "imap.test.com", port: 999, username: "[email protected]", password: "password")
#
# # or for specific host preset
# EmailSettingsValidator.validate_imap(**{ username: "[email protected]", password: "test" }.merge(Email.gmail_imap_settings))
#
# rescue *EmailSettingsExceptionHandler::EXPECTED_EXCEPTIONS => err
# EmailSettingsExceptionHandler.friendly_exception_message(err, host)
# end
class EmailSettingsValidator
def self.validate_as_user(user, protocol, **kwargs)
DistributedMutex.synchronize("validate_#{protocol}_#{user.id}", validity: 10) do
self.public_send("validate_#{protocol}", **kwargs)
end
end
##
# Attempts to authenticate and disconnect a POP3 session and if that raises
# an error then it is assumed the credentials or some other settings are wrong.
#
# @param debug [Boolean] - When set to true, any errors will be logged at a warning
# level before being re-raised.
def self.validate_pop3(
host:,
port:,
username:,
password:,
ssl: SiteSetting.pop3_polling_ssl,
openssl_verify: SiteSetting.pop3_polling_openssl_verify,
debug: Rails.env.development?
)
begin
pop3 = Net::POP3.new(host, port)
# Note that we do not allow which verification mode to be specified
# like we do for SMTP, we just pick TLS1_2 if the SSL and openSSL verify
# options have been enabled.
if ssl
if openssl_verify
pop3.enable_ssl(max_version: OpenSSL::SSL::TLS1_2_VERSION)
else
pop3.enable_ssl(OpenSSL::SSL::VERIFY_NONE)
end
end
# This disconnects itself, unlike SMTP and IMAP.
pop3.auth_only(username, password)
rescue => err
log_and_raise(err, debug)
end
end
##
# Attempts to start an SMTP session and if that raises an error then it is
# assumed the credentials or other settings are wrong.
#
# For Gmail, the port should be 587, enable_starttls_auto should be true,
# and enable_tls should be false.
#
# @param domain [String] - Used for HELO, will be the email sender's domain, so often
# will just be the host e.g. the domain for [email protected] is gmail.com.
# localhost can be used in development mode.
# See https://datatracker.ietf.org/doc/html/rfc788#section-4
# @param debug [Boolean] - When set to true, any errors will be logged at a warning
# level before being re-raised.
def self.validate_smtp(
host:,
port:,
username:,
password:,
domain: nil,
authentication: GlobalSetting.smtp_authentication,
enable_starttls_auto: GlobalSetting.smtp_enable_start_tls,
enable_tls: GlobalSetting.smtp_force_tls,
openssl_verify_mode: GlobalSetting.smtp_openssl_verify_mode,
debug: Rails.env.development?
)
begin
port, enable_tls, enable_starttls_auto =
provider_specific_ssl_overrides(host, port, enable_tls, enable_starttls_auto)
if enable_tls && enable_starttls_auto
raise ArgumentError, "TLS and STARTTLS are mutually exclusive"
end
if !%i[plain login cram_md5].include?(authentication.to_sym)
raise ArgumentError, "Invalid authentication method. Must be plain, login, or cram_md5."
end
if domain.blank?
if Rails.env.development?
domain = "localhost"
else
# Because we are using the SMTP settings here to send emails,
# the domain should just be the TLD of the host.
domain = MiniSuffix.domain(host)
end
end
smtp = Net::SMTP.new(host, port)
# These SSL options are cribbed from the Mail gem, which is used internally
# by ActionMailer. Unfortunately the mail gem hides this setup in private
# methods, e.g. https://github.com/mikel/mail/blob/master/lib/mail/network/delivery_methods/smtp.rb#L112-L147
#
# Relying on the GlobalSetting options is a good idea here.
#
# For specific use cases, options should be passed in from higher up. For example
# Gmail needs either port 465 and tls enabled, or port 587 and starttls_auto.
if openssl_verify_mode.kind_of?(String)
openssl_verify_mode = OpenSSL::SSL.const_get("VERIFY_#{openssl_verify_mode.upcase}")
end
ssl_context = Net::SMTP.default_ssl_context
ssl_context.verify_mode = openssl_verify_mode if openssl_verify_mode
smtp.enable_starttls_auto(ssl_context) if enable_starttls_auto
smtp.enable_tls(ssl_context) if enable_tls
smtp.open_timeout = 5
smtp.read_timeout = 5
smtp.start(domain, username, password, authentication.to_sym)
smtp.finish
rescue => err
log_and_raise(err, debug)
end
end
##
# Attempts to login, logout, and disconnect an IMAP session and if that raises
# an error then it is assumed the credentials or some other settings are wrong.
#
# @param debug [Boolean] - When set to true, any errors will be logged at a warning
# level before being re-raised.
def self.validate_imap(
host:,
port:,
username:,
password:,
open_timeout: 5,
ssl: true,
debug: false
)
begin
imap = Net::IMAP.new(host, port: port, ssl: ssl, open_timeout: open_timeout)
imap.login(username, password)
begin
imap.logout
rescue StandardError
nil
end
imap.disconnect
rescue => err
log_and_raise(err, debug)
end
end
def self.log_and_raise(err, debug)
if debug
Rails.logger.warn(
"[EmailSettingsValidator] Error encountered when validating email settings: #{err.message} #{err.backtrace.join("\n")}",
)
end
raise err
end
def self.provider_specific_ssl_overrides(host, port, enable_tls, enable_starttls_auto)
# Gmail acts weirdly if you do not use the correct combinations of
# TLS settings based on the port, we clean these up here for the user.
if host == "smtp.gmail.com"
if port.to_i == 587
enable_starttls_auto = true
enable_tls = false
elsif port.to_i == 465
enable_starttls_auto = false
enable_tls = true
end
end
[port, enable_tls, enable_starttls_auto]
end
end
``` | # frozen_string_literal: true
RSpec.describe EmailSettingsValidator do
let(:username) { "[email protected]" }
let(:password) { "mbdtf" }
describe "#validate_imap" do
let(:host) { "imap.gmail.com" }
let(:port) { 993 }
let(:net_imap_stub) do
obj = mock()
obj.stubs(:login).returns(true)
obj
end
before { Net::IMAP.stubs(:new).returns(net_imap_stub) }
it "is valid if no error is raised" do
net_imap_stub.stubs(:logout).returns(true)
net_imap_stub.stubs(:disconnect).returns(true)
expect {
described_class.validate_imap(
host: host,
port: port,
username: username,
password: password,
)
}.not_to raise_error
end
it "is invalid if an error is raised" do
net_imap_stub.stubs(:login).raises(
Net::IMAP::NoResponseError,
stub(data: stub(text: "no response")),
)
expect {
described_class.validate_imap(
host: host,
port: port,
username: username,
password: password,
debug: true,
)
}.to raise_error(Net::IMAP::NoResponseError)
end
it "logs a warning if debug: true passed in and still raises the error" do
net_imap_stub.stubs(:login).raises(
Net::IMAP::NoResponseError,
stub(data: stub(text: "no response")),
)
Rails
.logger
.expects(:warn)
.with(regexp_matches(/\[EmailSettingsValidator\] Error encountered/))
.at_least_once
expect {
described_class.validate_imap(
host: host,
port: port,
username: username,
password: password,
debug: true,
)
}.to raise_error(Net::IMAP::NoResponseError)
end
end
describe "#validate_pop3" do
let(:host) { "pop.gmail.com" }
let(:port) { 995 }
let(:net_pop3_stub) do
obj = mock()
obj.stubs(:auth_only).returns(true)
obj.stubs(:enable_ssl).returns(true)
obj
end
before { Net::POP3.stubs(:new).returns(net_pop3_stub) }
it "is valid if no error is raised" do
expect {
described_class.validate_pop3(
host: host,
port: port,
username: username,
password: password,
)
}.not_to raise_error
end
it "is invalid if an error is raised" do
net_pop3_stub.stubs(:auth_only).raises(Net::POPAuthenticationError, "invalid credentials")
expect {
described_class.validate_pop3(
host: host,
port: port,
username: username,
password: password,
debug: true,
)
}.to raise_error(Net::POPAuthenticationError)
end
it "logs a warning if debug: true passed in and still raises the error" do
Rails
.logger
.expects(:warn)
.with(regexp_matches(/\[EmailSettingsValidator\] Error encountered/))
.at_least_once
net_pop3_stub.stubs(:auth_only).raises(Net::POPAuthenticationError, "invalid credentials")
expect {
described_class.validate_pop3(
host: host,
port: port,
username: username,
password: password,
debug: true,
)
}.to raise_error(Net::POPAuthenticationError)
end
it "uses the correct ssl verify params if those settings are enabled" do
SiteSetting.pop3_polling_ssl = true
SiteSetting.pop3_polling_openssl_verify = true
net_pop3_stub.expects(:enable_ssl).with(max_version: OpenSSL::SSL::TLS1_2_VERSION)
expect {
described_class.validate_pop3(
host: host,
port: port,
username: username,
password: password,
)
}.not_to raise_error
end
it "uses the correct ssl verify params if openssl_verify is not enabled" do
SiteSetting.pop3_polling_ssl = true
SiteSetting.pop3_polling_openssl_verify = false
net_pop3_stub.expects(:enable_ssl).with(OpenSSL::SSL::VERIFY_NONE)
expect {
described_class.validate_pop3(
host: host,
port: port,
username: username,
password: password,
)
}.not_to raise_error
end
end
describe "#validate_smtp" do
let(:host) { "smtp.gmail.com" }
let(:port) { 587 }
let(:domain) { "gmail.com" }
let(:net_smtp_stub) do
obj = mock()
obj.stubs(:start).returns(true)
obj.stubs(:finish).returns(true)
obj.stubs(:enable_tls).returns(true)
obj.stubs(:enable_starttls_auto).returns(true)
obj.stubs(:open_timeout=)
obj.stubs(:read_timeout=)
obj
end
before { Net::SMTP.stubs(:new).returns(net_smtp_stub) }
it "is valid if no error is raised" do
expect {
described_class.validate_smtp(
host: host,
port: port,
username: username,
password: password,
domain: domain,
)
}.not_to raise_error
end
it "is invalid if an error is raised" do
net_smtp_stub.stubs(:start).raises(Net::SMTPAuthenticationError, "invalid credentials")
expect {
described_class.validate_smtp(
host: host,
port: port,
username: username,
password: password,
domain: domain,
)
}.to raise_error(Net::SMTPAuthenticationError)
end
it "logs a warning if debug: true passed in and still raises the error" do
Rails
.logger
.expects(:warn)
.with(regexp_matches(/\[EmailSettingsValidator\] Error encountered/))
.at_least_once
net_smtp_stub.stubs(:start).raises(
Net::SMTPAuthenticationError,
stub(message: "invalid credentials"),
)
expect {
described_class.validate_smtp(
host: host,
port: port,
username: username,
password: password,
debug: true,
domain: domain,
)
}.to raise_error(Net::SMTPAuthenticationError)
end
it "uses the correct ssl verify params for enable_tls if those settings are enabled" do
net_smtp_stub.expects(:enable_tls)
expect {
described_class.validate_smtp(
host: host,
port: 465,
username: username,
password: password,
domain: domain,
openssl_verify_mode: "peer",
enable_tls: true,
enable_starttls_auto: false,
)
}.not_to raise_error
end
it "uses the correct ssl verify params for enable_starttls_auto if those settings are enabled" do
net_smtp_stub.expects(:enable_starttls_auto)
expect {
described_class.validate_smtp(
host: host,
port: 587,
username: username,
password: password,
domain: domain,
openssl_verify_mode: "peer",
enable_tls: false,
enable_starttls_auto: true,
)
}.not_to raise_error
end
# Bug fix: the call previously passed enable_ssl: true, which is not a keyword
# of EmailSettingsValidator.validate_smtp (its keywords are enable_tls and
# enable_starttls_auto). The example still passed, but only because Ruby raised
# ArgumentError ("unknown keyword: :enable_ssl") — the intended TLS/STARTTLS
# mutual-exclusion guard was never reached. Use the real keyword so the guard
# ("TLS and STARTTLS are mutually exclusive") is what fires.
it "raises an ArgumentError if both enable_tls is true and enable_starttls_auto is true" do
expect {
described_class.validate_smtp(
host: host,
port: port,
username: username,
password: password,
domain: domain,
enable_tls: true,
enable_starttls_auto: true,
)
}.to raise_error(ArgumentError)
end
it "raises an ArgumentError if a bad authentication method is used" do
expect {
described_class.validate_smtp(
host: host,
port: port,
username: username,
password: password,
domain: domain,
authentication: :rubber_stamp,
)
}.to raise_error(ArgumentError)
end
context "when the domain is not provided" do
let(:domain) { nil }
it "gets the domain from the host" do
net_smtp_stub.expects(:start).with("gmail.com", username, password, :plain)
described_class.validate_smtp(
host: host,
port: port,
username: username,
password: password,
enable_tls: true,
enable_starttls_auto: false,
)
end
it "uses localhost when in development mode" do
Rails.env.stubs(:development?).returns(true)
net_smtp_stub.expects(:start).with("localhost", username, password, :plain)
described_class.validate_smtp(
host: host,
port: port,
username: username,
password: password,
enable_tls: true,
enable_starttls_auto: false,
)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Centralises increment/decrement/set logic for UserStat#post_count and
# UserStat#topic_count so callers share the same validation and the guards
# that keep the counters from going negative.
class UserStatCountUpdater
class << self
# Bumps the appropriate counter for +post+ (skipped for private-message topics).
def increment!(post, user_stat: nil)
update_using_operator!(post, user_stat: user_stat, action: :increment!)
end
# Decreases the appropriate counter for +post+; no-ops instead of going negative.
def decrement!(post, user_stat: nil)
update_using_operator!(post, user_stat: user_stat, action: :decrement!)
end
# Writes +count+ directly into +count_column+ (:post_count or :topic_count),
# clamping negative input to 0 and optionally logging the attempt.
def set!(user_stat:, count:, count_column:)
return if user_stat.blank?
return if !%i[post_count topic_count].include?(count_column)
if SiteSetting.verbose_user_stat_count_logging && count < 0
Rails.logger.warn(
"Attempted to insert negative count into UserStat##{count_column} for user #{user_stat.user_id}, using 0 instead. Caller:\n #{caller[0..10].join("\n")}",
)
end
# update_column skips validations/callbacks; negative values become 0 here.
user_stat.update_column(count_column, [count, 0].max)
end
private
# Shared implementation: picks the column from the post's role in its topic
# (first post => topic_count, regular reply => post_count) and applies +action+.
def update_using_operator!(post, user_stat: nil, action: :increment!)
return if !post&.topic
return if action == :increment! && post.topic.private_message?
stat = user_stat || post.user&.user_stat
return if stat.blank?
column =
if post.is_first_post?
:topic_count
elsif post.post_type == Post.types[:regular]
:post_count
end
return if column.blank?
# There are lingering bugs in the code base that does not properly increase the count when the status of the post
# changes. Since we have Job::DirectoryRefreshOlder which runs daily to reconcile the count, there is no need
# to trigger an error.
if action == :decrement! && stat.public_send(column) < 1
if SiteSetting.verbose_user_stat_count_logging
Rails.logger.warn(
"Attempted to insert negative count into UserStat##{column} for post with id '#{post.id}'. Caller:\n #{caller[0..10].join("\n")}",
)
end
return
end
stat.public_send(action, column)
end
end
end
``` | # frozen_string_literal: true
RSpec.describe UserStatCountUpdater do
fab!(:user)
fab!(:user_stat) { user.user_stat }
fab!(:post)
fab!(:post_2) { Fabricate(:post, topic: post.topic) }
before do
# Swap in a FakeLogger so warning output can be asserted on, and enable the
# site setting that gates the negative-count warnings.
@orig_logger = Rails.logger
Rails.logger = @fake_logger = FakeLogger.new
SiteSetting.verbose_user_stat_count_logging = true
end
after { Rails.logger = @orig_logger }
it "should log the exception when a negative count is inserted" do
# post is the topic's first post (topic_count path); post_2 is a reply
# (post_count path). Both counters start at 0, so decrementing warns.
UserStatCountUpdater.decrement!(post, user_stat: user_stat)
expect(@fake_logger.warnings.last).to match("topic_count")
expect(@fake_logger.warnings.last).to match(post.id.to_s)
UserStatCountUpdater.decrement!(post_2, user_stat: user_stat)
expect(@fake_logger.warnings.last).to match("post_count")
expect(@fake_logger.warnings.last).to match(post_2.id.to_s)
end
it "should log the exception when a negative count will be inserted but 0 is used instead" do
UserStatCountUpdater.set!(user_stat: user_stat, count: -10, count_column: :post_count)
expect(@fake_logger.warnings.last).to match("post_count")
expect(@fake_logger.warnings.last).to match("using 0")
expect(@fake_logger.warnings.last).to match("user #{user_stat.user_id}")
expect(user_stat.reload.post_count).to eq(0)
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Coordinates account creation with an omniauth authentication result stored in
# the session: applies authenticated attributes, decides whether a password is
# required, and finalises/cleans up once the account exists.
class UserAuthenticator
def initialize(
user,
session,
authenticator_finder: Users::OmniauthCallbacksController,
require_password: true
)
@user = user
@session = session
# Rehydrate the Auth::Result only when the session actually carries a hash
# of authentication data.
if session&.dig(:authentication) && session[:authentication].is_a?(Hash)
@auth_result = Auth::Result.from_session_data(session[:authentication], user: user)
end
@authenticator_finder = authenticator_finder
@require_password = require_password
end
# Prepares @user before save: activates and copies provider attributes when
# the auth result matches; otherwise enforces a password (unless opted out).
def start
if authenticated?
@user.active = true
@auth_result.apply_user_attributes!
elsif @require_password
@user.password_required!
end
@user.skip_email_validation = true if @auth_result && @auth_result.skip_email_validation
end
def has_authenticator?
!!authenticator
end
# Runs the provider's after-create hook, confirms the email when appropriate,
# then clears the authentication data out of the session.
def finish
if authenticator
authenticator.after_create_account(@user, @auth_result)
confirm_email
end
@session[:authentication] = @auth_result = nil if @session&.dig(:authentication)
end
def email_valid?
@auth_result&.email_valid
end
# True only when an auth result exists, its (case-insensitive) email matches
# the user's, and the provider marked the email as valid.
def authenticated?
return false if !@auth_result
return false if @auth_result&.email&.downcase != @user.email.downcase
return false if !@auth_result.email_valid
true
end
private
def confirm_email
@user.activate if authenticated?
end
# Lazily looks up the provider-specific authenticator by name.
def authenticator
if authenticator_name
@authenticator ||= @authenticator_finder.find_authenticator(authenticator_name)
end
end
def authenticator_name
@auth_result&.authenticator_name
end
end
``` | # frozen_string_literal: true
RSpec.describe UserAuthenticator do
# Builds the session-stored authentication payload that Auth::Result
# round-trips; email_valid is parameterised so both branches can be tested.
def github_auth(email_valid)
{
email: "[email protected]",
username: "joedoe546",
email_valid: email_valid,
omit_username: nil,
name: "Joe Doe 546",
authenticator_name: "github",
extra_data: {
provider: "github",
uid: "100",
},
skip_email_validation: false,
}
end
before { SiteSetting.enable_github_logins = true }
describe "#start" do
describe "without authentication session" do
it "should apply the right user attributes" do
user = User.new
UserAuthenticator.new(user, {}).start
expect(user.password_required?).to eq(true)
end
it "allows password requirement to be skipped" do
user = User.new
UserAuthenticator.new(user, {}, require_password: false).start
expect(user.password_required?).to eq(false)
end
end
end
describe "#finish" do
fab!(:group) { Fabricate(:group, automatic_membership_email_domains: "discourse.org") }
it "confirms email and adds the user to appropriate groups based on email" do
user = Fabricate(:user, email: "[email protected]")
expect(group.usernames).not_to include(user.username)
authentication = github_auth(true)
UserAuthenticator.new(user, { authentication: authentication }).finish
expect(user.email_confirmed?).to be_truthy
expect(group.usernames).to include(user.username)
end
it "doesn't confirm email if email is invalid" do
user = Fabricate(:user, email: "[email protected]")
authentication = github_auth(false)
UserAuthenticator.new(user, { authentication: authentication }).finish
expect(user.email_confirmed?).to be_falsey
expect(group.usernames).not_to include(user.username)
end
# The auth payload is for [email protected]; a user with a different email
# address must not be confirmed by it.
it "doesn't confirm email if it was changed" do
user = Fabricate(:user, email: "[email protected]")
authentication = github_auth(true)
UserAuthenticator.new(user, { authentication: authentication }).finish
expect(user.email_confirmed?).to be_falsey
expect(group.usernames).not_to include(user.username)
end
it "clears the authentication info from the session" do
user = Fabricate(:user, email: "[email protected]")
session = { authentication: github_auth(true) }
UserAuthenticator.new(user, session).finish
expect(user.email_confirmed?).to be_truthy
expect(session[:authentication]).to eq(nil)
end
it "raises an error for non-boolean values" do
user = Fabricate(:user, email: "[email protected]")
session = { authentication: github_auth("string") }
expect do UserAuthenticator.new(user, session).finish end.to raise_error ArgumentError
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Used as a data source via HashtagAutocompleteService to provide tag
# results when looking up a tag slug via markdown or searching for
# tags via the # autocomplete character.
class TagHashtagDataSource
# Tags are only offered when the site has tagging turned on.
def self.enabled?
SiteSetting.tagging_enabled
end
def self.icon
"tag"
end
def self.type
"tag"
end
# Converts a Tag record (or the Hash produced by TagsController.tag_counts_json)
# into the HashtagItem shape the autocomplete service renders.
def self.tag_to_hashtag_item(tag, guardian)
topic_count_column = Tag.topic_count_column(guardian)
tag =
Tag.new(
tag.slice(:id, :name, :description).merge(topic_count_column => tag[:count]),
) if tag.is_a?(Hash)
HashtagAutocompleteService::HashtagItem.new.tap do |item|
item.text = tag.name
# e.g. "x5" — the topic count shown next to the tag name.
item.secondary_text = "x#{tag.public_send(topic_count_column)}"
item.description = tag.description
item.slug = tag.name
item.relative_url = tag.url
item.icon = icon
item.id = tag.id
end
end
private_class_method :tag_to_hashtag_item
# Exact-slug lookup, restricted to tags the guardian may see.
def self.lookup(guardian, slugs)
DiscourseTagging
.filter_visible(Tag.where_name(slugs), guardian)
.map { |tag| tag_to_hashtag_item(tag, guardian) }
end
# Term search; +condition+ selects starts-with vs contains matching.
def self.search(
guardian,
term,
limit,
condition = HashtagAutocompleteService.search_conditions[:contains]
)
tags_with_counts, _ =
DiscourseTagging.filter_allowed_tags(
guardian,
term: term,
term_type:
(
if condition == HashtagAutocompleteService.search_conditions[:starts_with]
DiscourseTagging.term_types[:starts_with]
else
DiscourseTagging.term_types[:contains]
end
),
with_context: true,
limit: limit,
order_search_results: true,
)
TagsController
.tag_counts_json(tags_with_counts, guardian)
.take(limit)
.map do |tag|
# We want the actual ID here not the `name` as tag_counts_json gives us.
tag[:id] = tags_with_counts.find { |t| t.name == tag[:name] }.id
tag_to_hashtag_item(tag, guardian)
end
end
def self.search_sort(search_results, _)
search_results.sort_by { |item| item.text.downcase }
end
# No-term default results: most popular visible tags, minus ones the user muted.
def self.search_without_term(guardian, limit)
tags_with_counts, _ =
DiscourseTagging.filter_allowed_tags(
guardian,
with_context: true,
limit: limit,
order_popularity: true,
excluded_tag_names: DiscourseTagging.muted_tags(guardian.user),
)
TagsController
.tag_counts_json(tags_with_counts, guardian)
.take(limit)
.map { |tag| tag_to_hashtag_item(tag, guardian) }
end
end
``` | # frozen_string_literal: true
RSpec.describe TagHashtagDataSource do
# Tag names share the "fact" prefix with differing public topic counts so both
# prefix matching and count-based ordering can be asserted.
fab!(:tag1) { Fabricate(:tag, name: "fact", public_topic_count: 0) }
fab!(:tag2) { Fabricate(:tag, name: "factor", public_topic_count: 5) }
fab!(:tag3) { Fabricate(:tag, name: "factory", public_topic_count: 4) }
fab!(:tag4) { Fabricate(:tag, name: "factorio", public_topic_count: 3) }
fab!(:tag5) { Fabricate(:tag, name: "factz", public_topic_count: 1) }
fab!(:user)
let(:guardian) { Guardian.new(user) }
describe "#enabled?" do
it "returns false if tagging is disabled" do
SiteSetting.tagging_enabled = false
expect(described_class.enabled?).to eq(false)
end
it "returns true if tagging is enabled" do
SiteSetting.tagging_enabled = true
expect(described_class.enabled?).to eq(true)
end
end
describe "#search" do
it "orders tag results by exact search match, then public topic count, then name" do
expect(described_class.search(guardian, "fact", 5).map(&:slug)).to eq(
%w[fact factor factory factorio factz],
)
end
it "does not get more than the limit" do
expect(described_class.search(guardian, "fact", 1).map(&:slug)).to eq(%w[fact])
end
it "does not get tags that the user does not have permission to see" do
Fabricate(:tag_group, permissions: { "staff" => 1 }, tag_names: ["fact"])
expect(described_class.search(guardian, "fact", 5).map(&:slug)).not_to include("fact")
end
it "returns an array of HashtagAutocompleteService::HashtagItem" do
expect(described_class.search(guardian, "fact", 1).first).to be_a(
HashtagAutocompleteService::HashtagItem,
)
end
it "includes the public topic count for the text of the tag in secondary text" do
expect(described_class.search(guardian, "fact", 5).map(&:secondary_text)).to eq(
%w[x0 x5 x4 x3 x1],
)
end
it "returns tags that are children of a TagGroup" do
parent_tag = Fabricate(:tag, name: "sidebar")
child_tag = Fabricate(:tag, name: "sidebar-v1")
tag_group = Fabricate(:tag_group, parent_tag: parent_tag, name: "Sidebar TG")
TagGroupMembership.create!(tag: child_tag, tag_group: tag_group)
expect(described_class.search(guardian, "sidebar-v", 5).map(&:slug)).to eq(%w[sidebar-v1])
end
end
describe "#search_without_term" do
it "returns distinct tags sorted by public topic count" do
expect(described_class.search_without_term(guardian, 5).map(&:slug)).to eq(
%w[factor factory factorio factz fact],
)
end
it "does not return tags the user does not have permission to view" do
Fabricate(:tag_group, permissions: { "staff" => 1 }, tag_names: ["factor"])
expect(described_class.search_without_term(guardian, 5).map(&:slug)).not_to include("factor")
end
it "does not return tags the user has muted" do
TagUser.create(user: user, tag: tag2, notification_level: TagUser.notification_levels[:muted])
expect(described_class.search_without_term(guardian, 5).map(&:slug)).not_to include("factor")
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# A service class that backfills the changes to the default sidebar categories and tags site settings.
#
# When a category/tag is removed from the site settings, the `SidebarSectionLink` records associated with the category/tag
# are deleted.
#
# When a category/tag is added to the site settings, a `SidebarSectionLink` record for the associated category/tag are
# created for all users that do not already have a `SidebarSectionLink` record for the category/tag.
class SidebarSiteSettingsBackfiller
# @param setting_name [String] "default_navigation_menu_categories" or
#   "default_navigation_menu_tags"; anything else raises.
# Diffs previous vs new pipe-delimited setting values into @added_ids /
# @removed_ids (category ids come straight from the split strings; tag names
# are resolved to ids via the Tag table).
def initialize(setting_name, previous_value:, new_value:)
@setting_name = setting_name
@linkable_klass, previous_ids, new_ids =
case setting_name
when "default_navigation_menu_categories"
[Category, previous_value.split("|"), new_value.split("|")]
when "default_navigation_menu_tags"
klass = Tag
[
klass,
klass.where(name: previous_value.split("|")).pluck(:id),
klass.where(name: new_value.split("|")).pluck(:id),
]
else
raise "Invalid setting_name"
end
@added_ids = new_ids - previous_ids
@removed_ids = previous_ids - new_ids
end
# Applies the diff: deletes links for removed ids, then bulk-inserts links for
# added ids for every real, unstaged user. Guarded by a DistributedMutex so
# concurrent saves of the same setting don't double-insert.
def backfill!
DistributedMutex.synchronize("backfill_sidebar_site_settings_#{@setting_name}") do
SidebarSectionLink.where(
linkable_type: @linkable_klass.to_s,
linkable_id: @removed_ids,
).delete_all
User
.real
.where(staged: false)
.select(:id)
.find_in_batches do |users|
rows = []
users.each do |user|
@added_ids.each do |linkable_id|
rows << {
user_id: user[:id],
linkable_type: @linkable_klass.to_s,
linkable_id: linkable_id,
}
end
end
SidebarSectionLink.insert_all(rows) if rows.present?
end
end
end
# Counts the distinct users the backfill would touch. The interpolated ids
# originate from the site setting / Tag table, not from arbitrary user input.
def number_of_users_to_backfill
select_statements = []
select_statements.push(<<~SQL) if @removed_ids.present?
SELECT
sidebar_section_links.user_id
FROM sidebar_section_links
WHERE sidebar_section_links.linkable_type = '#{@linkable_klass.to_s}'
AND sidebar_section_links.linkable_id IN (#{@removed_ids.join(",")})
SQL
if @added_ids.present?
# Returns the ids of users that will receive the new additions by excluding the users that already have the additions
# Note that we want to avoid doing a left outer join against the "sidebar_section_links" table as PG will end up having
# to do a full table join for both tables first which is less efficient and can be slow on large sites.
select_statements.push(<<~SQL)
SELECT
users.id
FROM users
WHERE users.id NOT IN (
SELECT
DISTINCT(sidebar_section_links.user_id)
FROM sidebar_section_links
WHERE sidebar_section_links.linkable_type = '#{@linkable_klass.to_s}'
AND sidebar_section_links.linkable_id IN (#{@added_ids.join(",")})
) AND users.id > 0 AND NOT users.staged
SQL
end
return 0 if select_statements.blank?
DB.query_single(<<~SQL)[0]
SELECT
COUNT(*)
FROM (#{select_statements.join("\nUNION DISTINCT\n")}) AS user_ids
SQL
end
end
``` | # frozen_string_literal: true
# Spec for SidebarSiteSettingsBackfiller. Fixtures: three regular users plus a
# staged user (who must never be backfilled), three categories/tags, and
# pre-existing sidebar links for user/user2 -> category|tag and
# user3 -> category2|tag2.
RSpec.describe SidebarSiteSettingsBackfiller do
fab!(:user)
fab!(:user2) { Fabricate(:user) }
fab!(:user3) { Fabricate(:user) }
fab!(:staged_user) { Fabricate(:user, staged: true) }
fab!(:category)
fab!(:category2) { Fabricate(:category) }
fab!(:category3) { Fabricate(:category) }
fab!(:user_category_sidebar_section_link) do
Fabricate(:category_sidebar_section_link, user: user, linkable: category)
end
fab!(:user2_category_sidebar_section_link) do
Fabricate(:category_sidebar_section_link, user: user2, linkable: category)
end
fab!(:user3_category2_sidebar_section_link) do
Fabricate(:category_sidebar_section_link, user: user3, linkable: category2)
end
let!(:category_sidebar_section_link_ids) do
[
user_category_sidebar_section_link.id,
user2_category_sidebar_section_link.id,
user3_category2_sidebar_section_link.id,
]
end
fab!(:tag)
fab!(:tag2) { Fabricate(:tag) }
fab!(:tag3) { Fabricate(:tag) }
fab!(:user_tag_sidebar_section_link) do
Fabricate(:tag_sidebar_section_link, user: user, linkable: tag)
end
fab!(:user2_tag_sidebar_section_link) do
Fabricate(:tag_sidebar_section_link, user: user2, linkable: tag)
end
fab!(:user3_tag2_sidebar_section_link) do
Fabricate(:tag_sidebar_section_link, user: user3, linkable: tag2)
end
let!(:tag_sidebar_section_link_ids) do
[
user_tag_sidebar_section_link.id,
user2_tag_sidebar_section_link.id,
user3_tag2_sidebar_section_link.id,
]
end
before do
# Clean up random users created as part of fabrication to make assertions easier to understand.
User.real.where("id NOT IN (?)", [user.id, user2.id, user3.id, staged_user.id]).delete_all
end
it "raises an error when class is initialized with invalid setting name" do
expect do
described_class.new("some_random_setting_name", previous_value: "", new_value: "")
end.to raise_error(RuntimeError, "Invalid setting_name")
end
describe "#backfill!" do
context "for default_navigation_menu_categories setting" do
it "deletes the right sidebar section link records when categories are removed" do
backfiller =
described_class.new(
"default_navigation_menu_categories",
previous_value: "#{category.id}|#{category2.id}|#{category3.id}",
new_value: "#{category3.id}",
)
expect do backfiller.backfill! end.to change { SidebarSectionLink.count }.by(-3)
expect(SidebarSectionLink.exists?(id: category_sidebar_section_link_ids)).to eq(false)
end
it "updates the right sidebar section link records when categories are added" do
backfiller =
described_class.new(
"default_navigation_menu_categories",
previous_value: "#{category.id}|#{category2.id}",
new_value: "#{category.id}|#{category2.id}|#{category3.id}",
)
expect do backfiller.backfill! end.to change { SidebarSectionLink.count }.by(3)
expect(
SidebarSectionLink.where(linkable_type: "Category", linkable_id: category3.id).pluck(
:user_id,
),
).to contain_exactly(user.id, user2.id, user3.id)
end
it "creates the right sidebar section link records when categories are added" do
backfiller =
described_class.new(
"default_navigation_menu_categories",
previous_value: "",
new_value: "#{category.id}|#{category2.id}|#{category3.id}",
)
expect do backfiller.backfill! end.to change { SidebarSectionLink.count }.by(6)
expect(
SidebarSectionLink.where(linkable_type: "Category", linkable_id: category.id).pluck(
:user_id,
),
).to contain_exactly(user.id, user2.id, user3.id)
expect(
SidebarSectionLink.where(linkable_type: "Category", linkable_id: category2.id).pluck(
:user_id,
),
).to contain_exactly(user.id, user2.id, user3.id)
expect(
SidebarSectionLink.where(linkable_type: "Category", linkable_id: category3.id).pluck(
:user_id,
),
).to contain_exactly(user.id, user2.id, user3.id)
end
it "deletes and creates the right sidebar section link records when categories are added and removed" do
backfiller =
described_class.new(
"default_navigation_menu_categories",
previous_value: "#{category.id}|#{category2.id}",
new_value: "#{category3.id}",
)
original_count = SidebarSectionLink.count
expect do backfiller.backfill! end.to change {
SidebarSectionLink.where(linkable_type: "Category", linkable_id: category.id).count
}.by(-2).and change {
SidebarSectionLink.where(linkable_type: "Category", linkable_id: category2.id).count
}.by(-1).and change {
SidebarSectionLink.where(
linkable_type: "Category",
linkable_id: category3.id,
).count
}.by(3)
expect(SidebarSectionLink.count).to eq(original_count) # Net change of 0
expect(
SidebarSectionLink.where(linkable_type: "Category", linkable_id: category3.id).pluck(
:user_id,
),
).to contain_exactly(user.id, user2.id, user3.id)
end
end
context "for default_navigation_menu_tags setting" do
it "deletes the right sidebar section link records when tags are removed" do
backfiller =
described_class.new(
"default_navigation_menu_tags",
previous_value: "#{tag.name}|#{tag2.name}|#{tag3.name}",
new_value: "#{tag3.name}",
)
expect do backfiller.backfill! end.to change { SidebarSectionLink.count }.by(-3)
expect(SidebarSectionLink.exists?(id: tag_sidebar_section_link_ids)).to eq(false)
end
it "creates the right sidebar section link records when tags are added" do
backfiller =
described_class.new(
"default_navigation_menu_tags",
previous_value: "#{tag.name}|#{tag2.name}",
new_value: "#{tag.name}|#{tag2.name}|#{tag3.name}",
)
expect do backfiller.backfill! end.to change { SidebarSectionLink.count }.by(3)
expect(
SidebarSectionLink.where(linkable_type: "Tag", linkable_id: tag3.id).pluck(:user_id),
).to contain_exactly(user.id, user2.id, user3.id)
end
it "deletes and creates the right sidebar section link records when tags are added and removed" do
backfiller =
described_class.new(
"default_navigation_menu_tags",
previous_value: "#{tag.name}|#{tag2.name}",
new_value: "#{tag3.name}",
)
original_count = SidebarSectionLink.count
expect do backfiller.backfill! end.to change {
SidebarSectionLink.where(linkable_type: "Tag", linkable_id: tag.id).count
}.by(-2).and change {
SidebarSectionLink.where(linkable_type: "Tag", linkable_id: tag2.id).count
}.by(-1).and change {
SidebarSectionLink.where(linkable_type: "Tag", linkable_id: tag3.id).count
}.by(3)
expect(SidebarSectionLink.count).to eq(original_count) # net change of 0
expect(
SidebarSectionLink.where(linkable_type: "Tag", linkable_id: tag3.id).pluck(:user_id),
).to contain_exactly(user.id, user2.id, user3.id)
end
end
end
# number_of_users_to_backfill is a pure count — it must not modify any rows.
describe "#number_of_users_to_backfill" do
context "for default_navigation_menu_categories setting" do
it "returns 3 for the user count when a new category for all users is added" do
backfiller =
described_class.new(
"default_navigation_menu_categories",
previous_value: "",
new_value: "#{category3.id}",
)
expect(backfiller.number_of_users_to_backfill).to eq(3)
end
it "returns 2 for the user count when category which 2 users have configured in sidebar is removed" do
backfiller =
described_class.new(
"default_navigation_menu_categories",
previous_value: "#{category.id}|#{category2.id}",
new_value: "#{category2.id}",
)
expect(backfiller.number_of_users_to_backfill).to eq(2)
end
# category, category2 => category2, category3
it "returns 3 for the user count when a new category is added and a category is removed" do
backfiller =
described_class.new(
"default_navigation_menu_categories",
previous_value: "#{category.id}|#{category2.id}",
new_value: "#{category2.id}|#{category3.id}",
)
expect(backfiller.number_of_users_to_backfill).to eq(3)
end
it "returns 0 for the user count when no new category is added or removed" do
backfiller =
described_class.new(
"default_navigation_menu_categories",
previous_value: "",
new_value: "",
)
expect(backfiller.number_of_users_to_backfill).to eq(0)
end
end
context "for default_navigation_menu_tags setting" do
it "returns 3 for the user count when a new tag for all users is added" do
backfiller =
described_class.new(
"default_navigation_menu_tags",
previous_value: "",
new_value: "#{tag3.name}",
)
expect(backfiller.number_of_users_to_backfill).to eq(3)
end
# tag, tag2 => tag2
it "returns 2 for the user count when tag which 2 users have configured in sidebar is removed" do
backfiller =
described_class.new(
"default_navigation_menu_tags",
previous_value: "#{tag.name}|#{tag2.name}",
new_value: "#{tag2.name}",
)
expect(backfiller.number_of_users_to_backfill).to eq(2)
end
# tag, tag2 => tag2, tag3
it "returns 3 for the user count when a new tag is added and a tag is removed" do
backfiller =
described_class.new(
"default_navigation_menu_tags",
previous_value: "#{tag.name}|#{tag2.name}",
new_value: "#{tag2.name}|#{tag3.name}",
)
expect(backfiller.number_of_users_to_backfill).to eq(3)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Helper behind destructive `rake destroy:*` maintenance tasks: bulk-deletes
# topics, categories, private messages, groups, non-admin users and site
# statistics. All progress output goes to the IO handed to the constructor so
# tasks can run quietly in tests.
class DestroyTask
  # @param io [IO] destination for progress messages (STDOUT by default).
  def initialize(io = STDOUT)
    @io = io
  end

  # Destroys all non-pinned topics in the category identified by its slug
  # (optionally scoped to a parent category slug). System topics
  # (user_id: -1) are preserved unless delete_system_topics is true.
  def destroy_topics(category, parent_category = nil, delete_system_topics = false)
    c = Category.find_by_slug(category, parent_category)
    descriptive_slug = parent_category ? "#{parent_category}/#{category}" : category
    return @io.puts "A category with the slug: #{descriptive_slug} could not be found" if c.nil?
    topics =
      if delete_system_topics
        Topic.where(category_id: c.id, pinned_at: nil)
      else
        Topic.where(category_id: c.id, pinned_at: nil).where.not(user_id: -1)
      end
    @io.puts "There are #{topics.count} topics to delete in #{descriptive_slug} category"
    topics.find_each do |topic|
      @io.puts "Deleting #{topic.slug}..."
      first_post = topic.ordered_posts.first
      # Skip post-less topics instead of aborting the whole run (was `return`,
      # which silently left the remaining topics undeleted).
      next @io.puts "Topic.ordered_posts.first was nil" if first_post.nil?
      @io.puts PostDestroyer.new(Discourse.system_user, first_post).destroy
    end
  end

  # Same as #destroy_topics but addressed by category id.
  def destroy_topics_in_category(category_id, delete_system_topics = false)
    # find_by returns nil for unknown ids; `Category.find` would raise
    # RecordNotFound and make the guard below unreachable.
    c = Category.find_by(id: category_id)
    return @io.puts "A category with the id: #{category_id} could not be found" if c.nil?
    topics =
      if delete_system_topics
        Topic.where(category_id: c.id, pinned_at: nil)
      else
        Topic.where(category_id: c.id, pinned_at: nil).where.not(user_id: -1)
      end
    @io.puts "There are #{topics.count} topics to delete in #{c.slug} category"
    topics.find_each do |topic|
      first_post = topic.ordered_posts.first
      next @io.puts "Topic.ordered_posts.first was nil for topic: #{topic.id}" if first_post.nil?
      PostDestroyer.new(Discourse.system_user, first_post).destroy
    end
    topics = Topic.where(category_id: c.id, pinned_at: nil)
    @io.puts "There are #{topics.count} topics that could not be deleted in #{c.slug} category"
  end

  # Runs #destroy_topics over every category on the site.
  def destroy_topics_all_categories
    categories = Category.all
    categories.each { |c| @io.puts destroy_topics(c.slug, c.parent_category&.slug) }
  end

  # Destroys every private-message topic via its first post.
  def destroy_private_messages
    Topic
      .where(archetype: "private_message")
      .find_each do |pm|
        @io.puts "Destroying #{pm.slug} pm"
        first_post = pm.ordered_posts.first
        @io.puts PostDestroyer.new(Discourse.system_user, first_post).destroy
      end
  end

  # Destroys the category with the given id, including its subcategories and
  # all of their topics.
  def destroy_category(category_id, destroy_system_topics = false)
    c = Category.find_by_id(category_id)
    return @io.puts "A category with the id: #{category_id} could not be found" if c.nil?
    subcategories = Category.where(parent_category_id: c.id)
    @io.puts "There are #{subcategories.count} subcategories to delete" if subcategories
    subcategories.each { |s| category_topic_destroyer(s, destroy_system_topics) }
    category_topic_destroyer(c, destroy_system_topics)
  end

  # Destroys all non-automatic (user-created) groups.
  def destroy_groups
    groups = Group.where(automatic: false)
    groups.each do |group|
      @io.puts "destroying group: #{group.id}"
      @io.puts group.destroy
    end
  end

  # Destroys every human, non-admin user together with their posts, reporting
  # per-user success/failure without stopping the batch.
  def destroy_users
    User
      .human_users
      .where(admin: false)
      .find_each do |user|
        begin
          if UserDestroyer.new(Discourse.system_user).destroy(
               user,
               delete_posts: true,
               context: "destroy task",
             )
            @io.puts "#{user.username} deleted"
          else
            @io.puts "#{user.username} not deleted"
          end
        rescue UserDestroyer::PostsExistError
          raise Discourse::InvalidAccess.new(
                  "User #{user.username} has #{user.post_count} posts, so can't be deleted.",
                )
        rescue NoMethodError
          @io.puts "#{user.username} could not be deleted"
        rescue Discourse::InvalidAccess => e
          @io.puts "#{user.username} #{e.message}"
        end
      end
  end

  # Wipes site statistics and tracking tables.
  def destroy_stats
    ApplicationRequest.delete_all
    IncomingLink.delete_all
    UserVisit.delete_all
    UserProfileView.delete_all
    UserProfile.update_all(views: 0)
    PostAction.unscoped.delete_all
    EmailLog.delete_all
  end

  private

  # Deletes a category's topics, then the category record itself.
  def category_topic_destroyer(category, destroy_system_topics = false)
    destroy_topics_in_category(category.id, destroy_system_topics)
    @io.puts "Destroying #{category.slug} category"
    category.destroy
  end
end
``` | # frozen_string_literal: true
# Spec for DestroyTask. Each example passes a StringIO so the task's progress
# output does not pollute the test run.
RSpec.describe DestroyTask do
describe "destroy topics" do
fab!(:c) { Fabricate(:category_with_definition) }
fab!(:t) { Fabricate(:topic, category: c) }
let!(:p) { Fabricate(:post, topic: t) }
fab!(:c2) { Fabricate(:category_with_definition) }
fab!(:t2) { Fabricate(:topic, category: c2) }
let!(:p2) { Fabricate(:post, topic: t2) }
fab!(:sc) { Fabricate(:category_with_definition, parent_category: c2) }
fab!(:t3) { Fabricate(:topic, category: sc) }
let!(:p3) { Fabricate(:post, topic: t3) }
it "destroys all topics in a category" do
destroy_task = DestroyTask.new(StringIO.new)
expect { destroy_task.destroy_topics(c.slug) }.to change {
Topic.where(category_id: c.id).count
}.by (-1)
end
it "destroys all topics in a sub category" do
destroy_task = DestroyTask.new(StringIO.new)
expect { destroy_task.destroy_topics(sc.slug, c2.slug) }.to change {
Topic.where(category_id: sc.id).count
}.by(-1)
end
it "doesn't destroy system topics" do
destroy_task = DestroyTask.new(StringIO.new)
destroy_task.destroy_topics(c2.slug)
expect(Topic.where(category_id: c2.id).count).to eq 1
end
it "destroys topics in all categories" do
destroy_task = DestroyTask.new(StringIO.new)
destroy_task.destroy_topics_all_categories
expect(Post.where(topic_id: [t.id, t2.id, t3.id]).count).to eq 0
end
end
describe "destroy categories" do
fab!(:c) { Fabricate(:category_with_definition) }
fab!(:t) { Fabricate(:topic, category: c) }
let!(:p) { Fabricate(:post, topic: t) }
fab!(:c2) { Fabricate(:category_with_definition) }
fab!(:t2) { Fabricate(:topic, category: c) }
let!(:p2) { Fabricate(:post, topic: t2) }
fab!(:sc) { Fabricate(:category_with_definition, parent_category: c2) }
fab!(:t3) { Fabricate(:topic, category: sc) }
let!(:p3) { Fabricate(:post, topic: t3) }
it "destroys specified category" do
destroy_task = DestroyTask.new(StringIO.new)
expect { destroy_task.destroy_category(c.id) }.to change {
Category.where(id: c.id).count
}.by (-1)
end
it "destroys sub-categories when destroying parent category" do
destroy_task = DestroyTask.new(StringIO.new)
expect { destroy_task.destroy_category(c2.id) }.to change {
Category.where(id: sc.id).count
}.by (-1)
end
end
describe "private messages" do
let!(:pm) { Fabricate(:private_message_post) }
let!(:pm2) { Fabricate(:private_message_post) }
it "destroys all private messages" do
destroy_task = DestroyTask.new(StringIO.new)
destroy_task.destroy_private_messages
expect(Topic.where(archetype: "private_message").count).to eq 0
end
end
describe "groups" do
let!(:g) { Fabricate(:group) }
let!(:g2) { Fabricate(:group) }
it "destroys all groups" do
destroy_task = DestroyTask.new(StringIO.new)
destroy_task.destroy_groups
expect(Group.where(automatic: false).count).to eq 0
end
it "doesn't destroy default groups" do
destroy_task = DestroyTask.new(StringIO.new)
before_count = Group.count
destroy_task.destroy_groups
# Only the two fabricated (non-automatic) groups are removed.
expect(Group.count).to eq before_count - 2
end
end
describe "users" do
it "destroys all non-admin users" do
before_count = User.count
Fabricate(:user)
Fabricate(:user)
Fabricate(:admin)
destroy_task = DestroyTask.new(StringIO.new)
destroy_task.destroy_users
expect(User.where(admin: false).count).to eq 0
# admin does not get destroyed
expect(User.count).to eq before_count + 1
end
end
describe "stats" do
# Smoke test only — asserts destroy_stats runs without raising.
it "destroys all site stats" do
destroy_task = DestroyTask.new(StringIO.new)
destroy_task.destroy_stats
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
require "net/imap"
require "net/smtp"
require "net/pop"
# Maps low-level email (POP3/IMAP/SMTP) exceptions raised while validating
# email settings to friendly, translated messages, with provider-specific
# overrides (currently only Gmail).
class EmailSettingsExceptionHandler
  # Exception classes callers are expected to rescue and feed through
  # .friendly_exception_message. Frozen so the list cannot be mutated at runtime.
  EXPECTED_EXCEPTIONS = [
    Net::POPAuthenticationError,
    Net::IMAP::NoResponseError,
    Net::IMAP::Error,
    Net::SMTPAuthenticationError,
    Net::SMTPServerBusy,
    Net::SMTPSyntaxError,
    Net::SMTPFatalError,
    Net::SMTPUnknownError,
    Net::OpenTimeout,
    Net::ReadTimeout,
    SocketError,
    Errno::ECONNREFUSED,
  ].freeze

  # Default message formatting, suitable for any mail provider.
  class GenericProvider
    def initialize(exception)
      @exception = exception
    end

    # Returns a translated, human-readable description of @exception.
    # NOTE: branch order matters — Net::IMAP::NoResponseError must be tested
    # before its ancestor Net::IMAP::Error.
    def message
      case @exception
      when Net::POPAuthenticationError
        net_pop_authentication_error
      when Net::IMAP::NoResponseError
        net_imap_no_response_error
      when Net::IMAP::Error
        net_imap_unhandled_error
      when Net::SMTPAuthenticationError
        net_smtp_authentication_error
      when Net::SMTPServerBusy
        net_smtp_server_busy
      when Net::SMTPSyntaxError, Net::SMTPFatalError, Net::SMTPUnknownError
        net_smtp_unhandled_error
      when SocketError, Errno::ECONNREFUSED
        socket_connection_error
      when Net::OpenTimeout, Net::ReadTimeout
        net_timeout_error
      else
        unhandled_error
      end
    end

    private

    def net_pop_authentication_error
      I18n.t("email_settings.pop3_authentication_error")
    end

    def net_imap_no_response_error
      # Most of IMAP's errors are lumped under the NoResponseError, including invalid
      # credentials errors, because it is raised when a "NO" response is
      # raised from the IMAP server https://datatracker.ietf.org/doc/html/rfc3501#section-7.1.2
      #
      # Generally, it should be fairly safe to just return the error message as is.
      if @exception.message.match?(/Invalid credentials/)
        I18n.t("email_settings.imap_authentication_error")
      else
        I18n.t(
          "email_settings.imap_no_response_error",
          message: @exception.message.gsub(" (Failure)", ""),
        )
      end
    end

    def net_imap_unhandled_error
      I18n.t("email_settings.imap_unhandled_error", message: @exception.message)
    end

    def net_smtp_authentication_error
      I18n.t("email_settings.smtp_authentication_error")
    end

    def net_smtp_server_busy
      I18n.t("email_settings.smtp_server_busy_error")
    end

    def net_smtp_unhandled_error
      I18n.t("email_settings.smtp_unhandled_error", message: @exception.message)
    end

    def socket_connection_error
      I18n.t("email_settings.connection_error")
    end

    def net_timeout_error
      I18n.t("email_settings.timeout_error")
    end

    def unhandled_error
      I18n.t("email_settings.unhandled_error", message: @exception.message)
    end
  end

  # Gmail-specific overrides for the generic messages.
  class GmailProvider < GenericProvider
    def net_smtp_authentication_error
      # Gmail requires use of application-specific passwords when 2FA is enabled and return
      # a special error message calling this out.
      if @exception.message.match?(/Application-specific password required/)
        I18n.t("email_settings.authentication_error_gmail_app_password")
      else
        super
      end
    end

    def net_imap_no_response_error
      # Gmail requires use of application-specific passwords when 2FA is enabled and return
      # a special error message calling this out.
      if @exception.message.match?(/Application-specific password required/)
        I18n.t("email_settings.authentication_error_gmail_app_password")
      else
        super
      end
    end
  end

  # Entry point: picks the provider-specific formatter based on the host and
  # returns the translated message for the exception.
  def self.friendly_exception_message(exception, host)
    if host.include?("gmail.com")
      EmailSettingsExceptionHandler::GmailProvider.new(exception).message
    else
      EmailSettingsExceptionHandler::GenericProvider.new(exception).message
    end
  end
end
``` | # frozen_string_literal: true
# Spec for EmailSettingsExceptionHandler.friendly_exception_message: one
# example per handled exception class, plus Gmail-host overrides.
RSpec.describe EmailSettingsExceptionHandler do
describe "#friendly_exception_message" do
it "formats a Net::POPAuthenticationError" do
exception = Net::POPAuthenticationError.new("invalid credentials")
expect(described_class.friendly_exception_message(exception, "pop.test.com")).to eq(
I18n.t("email_settings.pop3_authentication_error"),
)
end
it "formats a Net::IMAP::NoResponseError for invalid credentials" do
# NoResponseError wraps an IMAP response object; stub the nested data.text.
exception = Net::IMAP::NoResponseError.new(stub(data: stub(text: "Invalid credentials")))
expect(described_class.friendly_exception_message(exception, "imap.test.com")).to eq(
I18n.t("email_settings.imap_authentication_error"),
)
end
it "formats a general Net::IMAP::NoResponseError" do
exception = Net::IMAP::NoResponseError.new(stub(data: stub(text: "NO bad problem (Failure)")))
expect(described_class.friendly_exception_message(exception, "imap.test.com")).to eq(
I18n.t("email_settings.imap_no_response_error", message: "NO bad problem"),
)
end
it "formats a general Net::IMAP::NoResponseError with application-specific password Gmail error" do
exception =
Net::IMAP::NoResponseError.new(
stub(data: stub(text: "NO Application-specific password required")),
)
expect(described_class.friendly_exception_message(exception, "imap.gmail.com")).to eq(
I18n.t("email_settings.authentication_error_gmail_app_password"),
)
end
it "formats a Net::SMTPAuthenticationError" do
exception = Net::SMTPAuthenticationError.new("invalid credentials")
expect(described_class.friendly_exception_message(exception, "smtp.test.com")).to eq(
I18n.t("email_settings.smtp_authentication_error"),
)
end
it "formats a Net::SMTPAuthenticationError with application-specific password Gmail error" do
exception =
Net::SMTPAuthenticationError.new(nil, message: "Application-specific password required")
expect(described_class.friendly_exception_message(exception, "smtp.gmail.com")).to eq(
I18n.t("email_settings.authentication_error_gmail_app_password"),
)
end
it "formats a Net::SMTPServerBusy" do
exception = Net::SMTPServerBusy.new("call me maybe later")
expect(described_class.friendly_exception_message(exception, "smtp.test.com")).to eq(
I18n.t("email_settings.smtp_server_busy_error"),
)
end
it "formats a Net::SMTPSyntaxError, Net::SMTPFatalError, and Net::SMTPUnknownError" do
exception = Net::SMTPSyntaxError.new(nil, message: "bad syntax")
expect(described_class.friendly_exception_message(exception, "smtp.test.com")).to eq(
I18n.t("email_settings.smtp_unhandled_error", message: exception.message),
)
exception = Net::SMTPFatalError.new(nil, message: "fatal")
expect(described_class.friendly_exception_message(exception, "smtp.test.com")).to eq(
I18n.t("email_settings.smtp_unhandled_error", message: exception.message),
)
exception = Net::SMTPUnknownError.new(nil, message: "unknown")
expect(described_class.friendly_exception_message(exception, "smtp.test.com")).to eq(
I18n.t("email_settings.smtp_unhandled_error", message: exception.message),
)
end
it "formats a SocketError and Errno::ECONNREFUSED" do
exception = SocketError.new("bad socket")
expect(described_class.friendly_exception_message(exception, "smtp.test.com")).to eq(
I18n.t("email_settings.connection_error"),
)
exception = Errno::ECONNREFUSED.new("no thanks")
expect(described_class.friendly_exception_message(exception, "smtp.test.com")).to eq(
I18n.t("email_settings.connection_error"),
)
end
it "formats a Net::OpenTimeout and Net::ReadTimeout error" do
exception = Net::OpenTimeout.new("timed out")
expect(described_class.friendly_exception_message(exception, "smtp.test.com")).to eq(
I18n.t("email_settings.timeout_error"),
)
exception = Net::ReadTimeout.new("timed out")
expect(described_class.friendly_exception_message(exception, "smtp.test.com")).to eq(
I18n.t("email_settings.timeout_error"),
)
end
it "formats unhandled errors" do
exception = StandardError.new("unknown")
expect(described_class.friendly_exception_message(exception, "smtp.test.com")).to eq(
I18n.t("email_settings.unhandled_error", message: exception.message),
)
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class PostOwnerChanger
# @param params [Hash] requires :post_ids (Array), :topic_id, :new_owner (User)
#   and :acting_user; optional :skip_revision (default false) suppresses the
#   post revision normally created for an ownership change.
# @raise [ArgumentError] naming the first required argument that is blank
def initialize(params)
@post_ids = params[:post_ids]
@topic = Topic.with_deleted.find_by(id: params[:topic_id].to_i)
@new_owner = params[:new_owner]
@acting_user = params[:acting_user]
@skip_revision = params[:skip_revision] || false
%i[post_ids topic new_owner acting_user].each do |arg|
raise ArgumentError.new(arg) if self.instance_variable_get("@#{arg}").blank?
end
end
# Reassigns each post (deleted posts included) in the topic to @new_owner and
# repairs derived state: topic ownership/recovery, the new owner's like on the
# post, topic-user notification level, last poster, topic statistics, the new
# owner's first-post timestamp, and reply_to_user_id on direct replies.
def change_owner!
@post_ids.each do |post_id|
# Unknown ids and posts belonging to other topics are silently skipped.
next unless post = Post.with_deleted.find_by(id: post_id, topic_id: @topic.id)
if post.is_first_post?
@topic.user = @new_owner
@topic.recover! if post.user.nil?
end
post.topic = @topic
post.set_owner(@new_owner, @acting_user, @skip_revision)
# Remove the new owner's existing like on this post (presumably because a
# user cannot like their own post — confirm against PostActionCreator rules).
PostActionDestroyer.destroy(@new_owner, post, :like, skip_delete_check: true)
# New owner watches topics they now "started", merely tracks others.
level = post.is_first_post? ? :watching : :tracking
TopicUser.change(
@new_owner.id,
@topic.id,
notification_level: NotificationLevels.topic_levels[level],
posted: true,
)
# If this post is the newest visible one, the new owner becomes last poster.
if post ==
@topic
.posts
.order("post_number DESC")
.where("NOT hidden AND posts.deleted_at IS NULL")
.first
@topic.last_poster = @new_owner
end
@topic.update_statistics
@new_owner.user_stat.update(
first_post_created_at: @new_owner.reload.posts.order("created_at ASC").first&.created_at,
)
Post.where(topic_id: @topic.id, reply_to_post_number: post.post_number).update_all(
reply_to_user_id: @new_owner.id,
)
# validate: false — the change must persist even for topics that no longer
# pass validation (e.g. titles that violate current site settings).
@topic.save!(validate: false)
end
end
end
``` | # frozen_string_literal: true
# Spec for PostOwnerChanger#change_owner!: argument validation, ownership
# transfer, notification levels, and integration-level stat/UserAction updates.
RSpec.describe PostOwnerChanger do
describe "#change_owner!" do
fab!(:editor) { Fabricate(:admin) }
fab!(:user_a) { Fabricate(:user) }
let(:p1) { create_post(post_number: 1) }
let(:topic) { p1.topic }
let(:p2) { create_post(topic: topic, post_number: 2) }
let(:p3) { create_post }
it "raises an error with a parameter missing" do
expect {
PostOwnerChanger.new(
post_ids: [p1.id],
topic_id: topic.id,
new_owner: nil,
acting_user: editor,
)
}.to raise_error(ArgumentError, /new_owner/)
end
it "calls PostRevisor" do
PostRevisor.any_instance.expects(:revise!)
PostOwnerChanger.new(
post_ids: [p1.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
end
it "changes the user" do
bumped_at = freeze_time topic.bumped_at
now = Time.zone.now
freeze_time(now - 1.day)
old_user = p1.user
PostActionCreator.like(user_a, p1)
p1.reload
expect(p1.topic.like_count).to eq(1)
freeze_time(now)
PostOwnerChanger.new(
post_ids: [p1.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
p1.reload
# The new owner's prior like is removed and bumped_at is untouched.
expect(p1.topic.like_count).to eq(0)
expect(p1.topic.bumped_at).to eq_time(bumped_at)
expect(p1.topic.last_post_user_id).to eq(user_a.id)
expect(old_user).not_to eq(p1.user)
expect(p1.user).to eq(user_a)
end
it "changes multiple posts" do
PostOwnerChanger.new(
post_ids: [p1.id, p2.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
p1.reload
p2.reload
expect(p1.user).not_to eq(nil)
expect(p1.user).to eq(user_a)
expect(p1.user).to eq(p2.user)
end
it "ignores posts in other topics" do
PostOwnerChanger.new(
post_ids: [p1.id, p3.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
p1.reload
p3.reload
expect(p1.user).to eq(user_a)
expect(p3.topic_id).not_to eq(p1.topic_id)
expect(p2.user).not_to eq(user_a)
end
it "skips creating new post revision if skip_revision is true" do
PostOwnerChanger.new(
post_ids: [p1.id, p2.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
skip_revision: true,
).change_owner!
p1.reload
p2.reload
expect(p1.revisions.size).to eq(0)
expect(p2.revisions.size).to eq(0)
end
it "changes the user even when the post does not pass validation" do
p1.update_attribute(:raw, "foo")
PostOwnerChanger.new(
post_ids: [p1.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
expect(p1.reload.user).to eq(user_a)
end
it "changes the user even when the topic does not pass validation" do
topic.update_column(:title, "short")
PostOwnerChanger.new(
post_ids: [p1.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
expect(p1.reload.user).to eq(user_a)
end
it "changes the owner when the post is deleted" do
p4 = Fabricate(:post, topic: topic, reply_to_post_number: p2.post_number)
PostDestroyer.new(editor, p4).destroy
PostOwnerChanger.new(
post_ids: [p4.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
expect(p4.reload.user).to eq(user_a)
end
it "sets 'posted' for TopicUser to true" do
PostOwnerChanger.new(
post_ids: [p1.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
expect(TopicUser.find_by(topic_id: topic.id, user_id: user_a.id).posted).to eq(true)
end
context "when setting topic notification level for the new owner" do
let(:p4) { create_post(post_number: 2, topic: topic) }
it "'watching' if the first post gets a new owner" do
PostOwnerChanger.new(
post_ids: [p1.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
tu = TopicUser.find_by(user_id: user_a.id, topic_id: topic.id)
expect(tu.notification_level).to eq(3)
end
it "'tracking' if other than the first post gets a new owner" do
PostOwnerChanger.new(
post_ids: [p4.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
tu = TopicUser.find_by(user_id: user_a.id, topic_id: topic.id)
expect(tu.notification_level).to eq(2)
end
end
context "with integration tests" do
subject(:change_owners) do
PostOwnerChanger.new(
post_ids: [p1.id, p2.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
end
let(:p1user) { p1.user }
let(:p2user) { p2.user }
before do
topic.update!(user_id: p1user.id)
p1user.user_stat.update!(
topic_count: 1,
post_count: 0,
first_post_created_at: p1.created_at,
)
p2user.user_stat.update!(
topic_count: 0,
post_count: 1,
first_post_created_at: p2.created_at,
)
UserAction.create!(
action_type: UserAction::NEW_TOPIC,
user_id: p1user.id,
acting_user_id: p1user.id,
target_post_id: -1,
target_topic_id: p1.topic_id,
created_at: p1.created_at,
)
UserAction.create!(
action_type: UserAction::REPLY,
user_id: p2user.id,
acting_user_id: p2user.id,
target_post_id: p2.id,
target_topic_id: p2.topic_id,
created_at: p2.created_at,
)
UserActionManager.enable
end
it "updates users' topic and post counts" do
PostActionCreator.like(p2user, p1)
expect(p1user.user_stat.reload.likes_received).to eq(1)
change_owners
p1user.reload
p2user.reload
user_a.reload
expect(p1user.topic_count).to eq(0)
expect(p1user.post_count).to eq(0)
expect(p2user.topic_count).to eq(0)
expect(p2user.post_count).to eq(0)
expect(user_a.topic_count).to eq(1)
expect(user_a.post_count).to eq(1)
p1_user_stat = p1user.user_stat
expect(p1_user_stat.first_post_created_at).to eq(nil)
expect(p1_user_stat.likes_received).to eq(0)
p2_user_stat = p2user.user_stat
expect(p2_user_stat.first_post_created_at).to eq(nil)
user_a_stat = user_a.user_stat
expect(user_a_stat.first_post_created_at).to be_present
expect(user_a_stat.likes_received).to eq(1)
end
it "handles whispers" do
whisper =
PostCreator.new(
editor,
topic_id: p1.topic_id,
reply_to_post_number: 1,
post_type: Post.types[:whisper],
raw: "this is a whispered reply",
).create
user_stat = editor.user_stat
expect {
PostOwnerChanger.new(
post_ids: [whisper.id],
topic_id: topic.id,
new_owner: Fabricate(:admin),
acting_user: editor,
).change_owner!
}.to_not change { user_stat.reload.post_count }
end
context "with private message topic" do
let(:pm) { create_post(archetype: "private_message", target_usernames: [p2user.username]) }
let(:pm_poster) { pm.user }
it "should update users' counts" do
PostActionCreator.like(p2user, pm)
expect {
PostOwnerChanger.new(
post_ids: [pm.id],
topic_id: pm.topic_id,
new_owner: user_a,
acting_user: editor,
).change_owner!
}.to_not change { pm_poster.user_stat.post_count }
expect(pm_poster.user_stat.likes_received).to eq(0)
user_a_stat = user_a.user_stat
expect(user_a_stat.first_post_created_at).to be_present
expect(user_a_stat.likes_received).to eq(0)
expect(user_a_stat.post_count).to eq(0)
end
end
it "updates UserAction records" do
g = Guardian.new(editor)
expect(UserAction.stats(user_a.id, g)).to eq([])
change_owners
expect(UserAction.stats(p1user.id, g)).to eq([])
expect(UserAction.stats(p2user.id, g)).to eq([])
stats = UserAction.stats(user_a.id, g)
expect(stats.size).to eq(2)
expect(stats[0].action_type).to eq(UserAction::NEW_TOPIC)
expect(stats[0].count).to eq(1)
expect(stats[1].action_type).to eq(UserAction::REPLY)
expect(stats[1].count).to eq(1)
end
it "updates reply_to_user_id" do
p4 =
Fabricate(
:post,
topic: topic,
reply_to_post_number: p1.post_number,
reply_to_user_id: p1.user_id,
)
p5 =
Fabricate(
:post,
topic: topic,
reply_to_post_number: p2.post_number,
reply_to_user_id: p2.user_id,
)
PostOwnerChanger.new(
post_ids: [p1.id],
topic_id: topic.id,
new_owner: user_a,
acting_user: editor,
).change_owner!
p4.reload
p5.reload
# Only replies to the transferred post are re-pointed at the new owner.
expect(p4.reply_to_user_id).to eq(user_a.id)
expect(p5.reply_to_user_id).to eq(p2.user_id)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Shifts a topic (and all of its posts) back in time to a given timestamp.
#
# The first post is moved to exactly the requested timestamp; every reply is
# shifted by the same offset applied to the topic, capped so that no post ends
# up in the future. Only moving backwards in time is supported.
class TopicTimestampChanger
  # Raised when the requested timestamp lies in the future.
  class InvalidTimestampError < StandardError
  end

  # @param timestamp [Numeric] target epoch seconds for the topic's creation
  # @param topic [Topic, nil] topic to change; looked up via topic_id when nil
  # @param topic_id [Integer, nil] id used only when topic is not provided
  # @raise [InvalidTimestampError] if timestamp is later than "now"
  def initialize(timestamp:, topic: nil, topic_id: nil)
    @topic = topic || Topic.with_deleted.find(topic_id)
    @posts = @topic.posts
    @current_timestamp = Time.zone.now
    @timestamp = Time.zone.at(timestamp)
    # Future timestamps are rejected up front.
    raise InvalidTimestampError if @timestamp.to_f > @current_timestamp.to_f
    @time_difference = calculate_time_difference
  end

  # Rewrites created_at/updated_at for the topic and every post inside a single
  # transaction, then busts the cached dashboard/about stats.
  # Yields the topic (still inside the transaction) when a block is given.
  def change!
    ActiveRecord::Base.transaction do
      last_posted_at = @timestamp
      @posts.each do |post|
        if post.is_first_post?
          update_post(post, @timestamp)
        else
          # Shift replies by the same offset as the topic, but never past "now".
          new_created_at = Time.at(post.created_at.to_f + @time_difference)
          new_created_at = @current_timestamp if new_created_at > @current_timestamp
          # Track the newest resulting post time for topic.last_posted_at.
          last_posted_at = new_created_at if new_created_at > last_posted_at
          update_post(post, new_created_at)
        end
      end
      @topic.reset_bumped_at
      update_topic(last_posted_at)
      yield(@topic) if block_given?
    end
    # Burst the cache for stats
    [AdminDashboardData, About].each { |klass| Discourse.redis.del klass.stats_cache_key }
  end

  private

  # Seconds between the requested timestamp and the topic's original creation.
  def calculate_time_difference
    @timestamp - @topic.created_at
  end

  def update_topic(last_posted_at)
    @topic.update(created_at: @timestamp, updated_at: @timestamp, last_posted_at: last_posted_at)
  end

  def update_post(post, timestamp)
    post.update(created_at: timestamp, updated_at: timestamp)
  end
end
``` | # frozen_string_literal: true
# Specs for TopicTimestampChanger: rejecting future timestamps, shifting the
# topic and its posts backwards in time, and bursting the stats caches.
RSpec.describe TopicTimestampChanger do
  describe "#change!" do
    let(:old_timestamp) { Time.zone.now }
    let(:topic) { Fabricate(:topic, created_at: old_timestamp) }
    let!(:p1) { Fabricate(:post, topic: topic, created_at: old_timestamp) }
    let!(:p2) { Fabricate(:post, topic: topic, created_at: old_timestamp + 1.day) }

    context "when new timestamp is in the future" do
      let(:new_timestamp) { old_timestamp + 2.day }

      it "should raise the right error" do
        expect {
          TopicTimestampChanger.new(topic: topic, timestamp: new_timestamp.to_f).change!
        }.to raise_error(TopicTimestampChanger::InvalidTimestampError)
      end
    end

    context "when new timestamp is in the past" do
      let(:new_timestamp) { old_timestamp - 2.day }

      it "changes the timestamp of the topic and opening post" do
        freeze_time
        TopicTimestampChanger.new(topic: topic, timestamp: new_timestamp.to_f).change!
        topic.reload
        p1.reload
        p2.reload
        # The reply keeps its +1.day offset relative to the new topic time.
        last_post_created_at = p2.created_at
        expect(topic.created_at).to eq_time(new_timestamp)
        expect(topic.updated_at).to eq_time(new_timestamp)
        expect(topic.bumped_at).to eq_time(last_post_created_at)
        expect(topic.last_posted_at).to eq_time(last_post_created_at)
        expect(p1.created_at).to eq_time(new_timestamp)
        expect(p1.updated_at).to eq_time(new_timestamp)
        expect(p2.created_at).to eq_time(new_timestamp + 1.day)
        expect(p2.updated_at).to eq_time(new_timestamp + 1.day)
      end

      context "when posts have timestamps in the future" do
        it "should set the new timestamp as the default timestamp" do
          new_timestamp = freeze_time
          # A post dated in the future must be clamped to "now".
          p3 = Fabricate(:post, topic: topic, created_at: new_timestamp + 3.days)
          TopicTimestampChanger.new(topic: topic, timestamp: new_timestamp.to_f).change!
          p3.reload
          expect(p3.created_at).to eq_time(new_timestamp)
          expect(p3.updated_at).to eq_time(new_timestamp)
        end
      end
    end

    it "deletes the stats cache" do
      Discourse.redis.set AdminDashboardData.stats_cache_key, "X"
      Discourse.redis.set About.stats_cache_key, "X"
      TopicTimestampChanger.new(topic: topic, timestamp: Time.zone.now.to_f).change!
      expect(Discourse.redis.get(AdminDashboardData.stats_cache_key)).to eq(nil)
      expect(Discourse.redis.get(About.stats_cache_key)).to eq(nil)
    end
  end
end
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Promotes a user to the given trust level; never demotes.
class TrustLevelGranter
  # Convenience entry point: build a granter and apply it immediately.
  def self.grant(trust_level, user)
    new(trust_level, user).grant
  end

  def initialize(trust_level, user)
    @trust_level = trust_level
    @user = user
  end

  # Raises the user's trust level when it is below the target and persists
  # the change. A user already at or above the target is left untouched.
  def grant
    return unless @user.trust_level < @trust_level

    @user.change_trust_level!(@trust_level)
    @user.save!
  end
end
``` | # frozen_string_literal: true
# Spec for TrustLevelGranter: granting a higher trust level persists it.
RSpec.describe TrustLevelGranter do
  describe "grant" do
    it "grants trust level" do
      user = Fabricate(:user, email: "[email protected]", trust_level: 0)

      expect { TrustLevelGranter.grant(3, user) }.to change { user.reload.trust_level }.from(0).to(3)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Executes a theme's pending settings migrations inside a sandboxed
# MiniRacer (V8) context, with limits on pending count, run time and memory.
#
# Each migration is a theme field named "NNNN-some-name" whose JS default
# export receives a Map of the theme's overridden settings and must return an
# updated Map. #run returns an array of hashes describing each migration and
# its before/after settings; persisting the results is the caller's concern.
class ThemeSettingsMigrationsRunner
  Migration = Struct.new(:version, :name, :original_name, :code, :theme_field_id)

  # JS shim evaluated in the sandbox. It validates the migration module's
  # default export, converts the plain settings object to a Map and back, and
  # throws sentinel error messages that #run maps onto localized errors.
  MIGRATION_ENTRY_POINT_JS = <<~JS
    const migrate = require("discourse/theme/migration")?.default;
    function main(settingsObj) {
      if (!migrate) {
        throw new Error("no_exported_migration_function");
      }
      if (typeof migrate !== "function") {
        throw new Error("default_export_is_not_a_function");
      }
      const map = new Map(Object.entries(settingsObj));
      const updatedMap = migrate(map);
      if (!updatedMap) {
        throw new Error("migration_function_no_returned_value");
      }
      if (!(updatedMap instanceof Map)) {
        throw new Error("migration_function_wrong_return_type");
      }
      return Object.fromEntries(updatedMap.entries());
    }
  JS
  private_constant :Migration, :MIGRATION_ENTRY_POINT_JS

  # Memoized source of loader.js, which supplies `require` inside the sandbox.
  def self.loader_js_lib_content
    @loader_js_lib_content ||=
      File.read(
        File.join(
          Rails.root,
          "app/assets/javascripts/node_modules/loader.js/dist/loader/loader.js",
        ),
      )
  end

  # @param theme [Theme] theme whose pending migrations should run
  # @param limit [Integer] maximum number of pending migrations allowed
  # @param timeout [Integer] per-migration V8 timeout (MiniRacer `timeout` option)
  # @param memory [Integer] per-migration V8 memory cap in bytes
  def initialize(theme, limit: 100, timeout: 100, memory: 2.megabytes)
    @theme = theme
    @limit = limit
    @timeout = timeout
    @memory = memory
  end

  # Runs all pending migrations in version order, feeding each migration's
  # output settings into the next. Returns [] when nothing is pending.
  #
  # @return [Array<Hash>] one result hash per migration executed
  # @raise [Theme::SettingsMigrationError] on any validation or runtime failure
  def run
    fields = lookup_pending_migrations_fields
    count = fields.count
    return [] if count == 0
    raise_error("themes.import_error.migrations.too_many_pending_migrations") if count > @limit
    migrations = convert_fields_to_migrations(fields)
    migrations.sort_by!(&:version)
    # Highest version already recorded for this theme; -Infinity when none so
    # every pending migration passes the out-of-sequence check.
    current_migration_version =
      @theme.theme_settings_migrations.order(version: :desc).pick(:version)
    current_migration_version ||= -Float::INFINITY
    current_settings = lookup_overriden_settings
    migrations.map do |migration|
      # Pending versions must strictly increase past the last recorded one.
      if migration.version <= current_migration_version
        raise_error(
          "themes.import_error.migrations.out_of_sequence",
          name: migration.original_name,
          current: current_migration_version,
        )
      end
      migrated_settings = execute(migration, current_settings)
      results = {
        version: migration.version,
        name: migration.name,
        original_name: migration.original_name,
        theme_field_id: migration.theme_field_id,
        settings_before: current_settings,
        settings_after: migrated_settings,
      }
      # Chain: this migration's output becomes the next migration's input.
      current_settings = migrated_settings
      current_migration_version = migration.version
      results
    rescue DiscourseJsProcessor::TranspileError => error
      raise_error(
        "themes.import_error.migrations.syntax_error",
        name: migration.original_name,
        error: error.message,
      )
    rescue MiniRacer::V8OutOfMemoryError
      raise_error(
        "themes.import_error.migrations.exceeded_memory_limit",
        name: migration.original_name,
      )
    rescue MiniRacer::ScriptTerminatedError
      raise_error("themes.import_error.migrations.timed_out", name: migration.original_name)
    rescue MiniRacer::RuntimeError => error
      # Map the sandbox's sentinel error messages (see MIGRATION_ENTRY_POINT_JS)
      # onto specific localized errors; anything else is a generic runtime error.
      message = error.message
      if message.include?("no_exported_migration_function")
        raise_error(
          "themes.import_error.migrations.no_exported_function",
          name: migration.original_name,
        )
      elsif message.include?("default_export_is_not_a_function")
        raise_error(
          "themes.import_error.migrations.default_export_not_a_function",
          name: migration.original_name,
        )
      elsif message.include?("migration_function_no_returned_value")
        raise_error(
          "themes.import_error.migrations.no_returned_value",
          name: migration.original_name,
        )
      elsif message.include?("migration_function_wrong_return_type")
        raise_error(
          "themes.import_error.migrations.wrong_return_type",
          name: migration.original_name,
        )
      else
        raise_error(
          "themes.import_error.migrations.runtime_error",
          name: migration.original_name,
          error: message,
        )
      end
    end
  end

  private

  # Migration fields that have no corresponding theme_settings_migration row.
  def lookup_pending_migrations_fields
    @theme
      .migration_fields
      .left_joins(:theme_settings_migration)
      .where(theme_settings_migration: { id: nil })
  end

  # Parses "NNNN-name" field names into Migration structs; rejects bad names.
  def convert_fields_to_migrations(fields)
    fields.map do |field|
      match_data = /\A(?<version>\d{4})-(?<name>.+)/.match(field.name)
      if !match_data
        raise_error("themes.import_error.migrations.invalid_filename", filename: field.name)
      end
      version = match_data[:version].to_i
      name = match_data[:name]
      original_name = field.name
      Migration.new(
        version: version,
        name: name,
        original_name: original_name,
        code: field.value,
        theme_field_id: field.id,
      )
    end
  end

  # Hash of only the settings the theme has overridden (name => cast value).
  def lookup_overriden_settings
    hash = {}
    @theme.theme_settings.each { |row| hash[row.name] = ThemeSettingsManager.cast_row_value(row) }
    hash
  end

  # Evaluates one migration in a fresh, limited V8 context and returns the
  # migrated settings hash. The context is always disposed.
  def execute(migration, settings)
    context = MiniRacer::Context.new(timeout: @timeout, max_memory: @memory)
    context.eval(self.class.loader_js_lib_content, filename: "loader.js")
    context.eval(
      DiscourseJsProcessor.transpile(migration.code, "", "discourse/theme/migration"),
      filename: "theme-#{@theme.id}-migration.js",
    )
    context.eval(MIGRATION_ENTRY_POINT_JS, filename: "migration-entrypoint.js")
    context.call("main", settings)
  ensure
    context&.dispose
  end

  # Raises a localized Theme::SettingsMigrationError.
  def raise_error(message_key, **i18n_args)
    raise Theme::SettingsMigrationError.new(I18n.t(message_key, **i18n_args))
  end
end
``` | # frozen_string_literal: true
# Specs for ThemeSettingsMigrationsRunner: chaining of migration outputs,
# skipping already-ran migrations, and the full catalogue of failure modes
# (bad filenames, out-of-sequence versions, syntax errors, resource limits,
# and invalid migration exports/return values).
describe ThemeSettingsMigrationsRunner do
  fab!(:theme)
  fab!(:migration_field) { Fabricate(:migration_theme_field, version: 1, theme: theme) }
  fab!(:settings_field) { Fabricate(:settings_theme_field, theme: theme, value: <<~YAML) }
    integer_setting: 1
    boolean_setting: true
    string_setting: ""
  YAML

  describe "#run" do
    it "passes values of overridden settings only to migrations" do
      theme.update_setting(:integer_setting, 1)
      theme.update_setting(:string_setting, "osama")
      theme.save!
      # The migration asserts it sees exactly the two overridden settings.
      migration_field.update!(value: <<~JS)
        export default function migrate(settings) {
          if (settings.get("integer_setting") !== 1) {
            throw new Error(`expected integer_setting to equal 1, but it's actually ${settings.get("integer_setting")}`);
          }
          if (settings.get("string_setting") !== "osama") {
            throw new Error(`expected string_setting to equal "osama", but it's actually "${settings.get("string_setting")}"`);
          }
          if (settings.size !== 2) {
            throw new Error(`expected the settings map to have only 2 keys, but instead got ${settings.size} keys`);
          }
          return settings;
        }
      JS
      results = described_class.new(theme).run
      expect(results.first[:theme_field_id]).to eq(migration_field.id)
      expect(results.first[:settings_before]).to eq(
        { "integer_setting" => 1, "string_setting" => "osama" },
      )
    end

    it "passes the output of the previous migration as input to the next one" do
      theme.update_setting(:integer_setting, 1)
      migration_field.update!(value: <<~JS)
        export default function migrate(settings) {
          settings.set("integer_setting", 111);
          return settings;
        }
      JS
      another_migration_field =
        Fabricate(:migration_theme_field, theme: theme, version: 2, value: <<~JS)
          export default function migrate(settings) {
            if (settings.get("integer_setting") !== 111) {
              throw new Error(`expected integer_setting to equal 111, but it's actually ${settings.get("integer_setting")}`);
            }
            settings.set("integer_setting", 222);
            return settings;
          }
        JS
      results = described_class.new(theme).run
      expect(results.size).to eq(2)
      expect(results[0][:theme_field_id]).to eq(migration_field.id)
      expect(results[1][:theme_field_id]).to eq(another_migration_field.id)
      expect(results[0][:settings_before]).to eq({})
      expect(results[0][:settings_after]).to eq({ "integer_setting" => 111 })
      expect(results[1][:settings_before]).to eq({ "integer_setting" => 111 })
      expect(results[1][:settings_after]).to eq({ "integer_setting" => 222 })
    end

    it "doesn't run migrations that have already been ran" do
      Fabricate(:theme_settings_migration, theme: theme, theme_field: migration_field)
      pending_field = Fabricate(:migration_theme_field, theme: theme, version: 23)
      results = described_class.new(theme).run
      expect(results.size).to eq(1)
      expect(results.first[:version]).to eq(23)
      expect(results.first[:theme_field_id]).to eq(pending_field.id)
    end

    it "doesn't error when no migrations have been ran yet" do
      results = described_class.new(theme).run
      expect(results.size).to eq(1)
      expect(results.first[:version]).to eq(1)
      expect(results.first[:theme_field_id]).to eq(migration_field.id)
    end

    it "doesn't error when there are no pending migrations" do
      Fabricate(:theme_settings_migration, theme: theme, theme_field: migration_field)
      results = described_class.new(theme).run
      expect(results.size).to eq(0)
    end

    it "raises an error when there are too many pending migrations" do
      Fabricate(:migration_theme_field, theme: theme, version: 2)
      expect do described_class.new(theme, limit: 1).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t("themes.import_error.migrations.too_many_pending_migrations"),
      )
    end

    it "raises an error if a migration field has a badly formatted name" do
      # Version must be exactly 4 digits followed by "-" and a name.
      migration_field.update_attribute(:name, "020-some-name")
      expect do described_class.new(theme).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t("themes.import_error.migrations.invalid_filename", filename: "020-some-name"),
      )
      migration_field.update_attribute(:name, "0020some-name")
      expect do described_class.new(theme).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t("themes.import_error.migrations.invalid_filename", filename: "0020some-name"),
      )
      migration_field.update_attribute(:name, "0020")
      expect do described_class.new(theme).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t("themes.import_error.migrations.invalid_filename", filename: "0020"),
      )
    end

    it "raises an error if a pending migration has version lower than the last ran migration" do
      migration_field.update!(name: "0020-some-name")
      Fabricate(:theme_settings_migration, theme: theme, theme_field: migration_field, version: 20)
      Fabricate(:migration_theme_field, theme: theme, version: 19, name: "0019-failing-migration")
      expect do described_class.new(theme).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t(
          "themes.import_error.migrations.out_of_sequence",
          name: "0019-failing-migration",
          current: 20,
        ),
      )
    end

    it "detects bad syntax in migrations and raises an error" do
      migration_field.update!(value: <<~JS)
        export default function migrate() {
      JS
      expect do described_class.new(theme).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t(
          "themes.import_error.migrations.syntax_error",
          name: "0001-some-name",
          error:
            'SyntaxError: "/discourse/theme/migration: Unexpected token (2:0)\n\n 1 | export default function migrate() {\n> 2 |\n | ^"',
        ),
      )
    end

    it "imposes memory limit on migrations and raises an error if they exceed the limit" do
      migration_field.update!(value: <<~JS)
        export default function migrate(settings) {
          let a = new Array(10000);
          while(true) {
            a = a.concat(new Array(10000));
          }
          return settings;
        }
      JS
      expect do described_class.new(theme, memory: 10.kilobytes).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t("themes.import_error.migrations.exceeded_memory_limit", name: "0001-some-name"),
      )
    end

    it "imposes time limit on migrations and raises an error if they exceed the limit" do
      migration_field.update!(value: <<~JS)
        export default function migrate(settings) {
          let a = 1;
          while(true) {
            a += 1;
          }
          return settings;
        }
      JS
      expect do described_class.new(theme, timeout: 10).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t("themes.import_error.migrations.timed_out", name: "0001-some-name"),
      )
    end

    it "raises a clear error message when the migration file doesn't export anything" do
      migration_field.update!(value: <<~JS)
        function migrate(settings) {
          return settings;
        }
      JS
      expect do described_class.new(theme).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t("themes.import_error.migrations.no_exported_function", name: "0001-some-name"),
      )
    end

    it "raises a clear error message when the migration file exports the default as something that's not a function" do
      migration_field.update!(value: <<~JS)
        export function migrate(settings) {
          return settings;
        }
        const AA = 1;
        export default AA;
      JS
      expect do described_class.new(theme).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t(
          "themes.import_error.migrations.default_export_not_a_function",
          name: "0001-some-name",
        ),
      )
    end

    it "raises a clear error message when the migration function doesn't return anything" do
      migration_field.update!(value: <<~JS)
        export default function migrate(settings) {}
      JS
      expect do described_class.new(theme).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t("themes.import_error.migrations.no_returned_value", name: "0001-some-name"),
      )
    end

    it "raises a clear error message when the migration function doesn't return a Map" do
      migration_field.update!(value: <<~JS)
        export default function migrate(settings) {
          return {};
        }
      JS
      expect do described_class.new(theme).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t("themes.import_error.migrations.wrong_return_type", name: "0001-some-name"),
      )
    end

    it "surfaces runtime errors that occur within the migration" do
      migration_field.update!(value: <<~JS)
        export default function migrate(settings) {
          null.toString();
          return settings;
        }
      JS
      expect do described_class.new(theme).run end.to raise_error(
        Theme::SettingsMigrationError,
        I18n.t(
          "themes.import_error.migrations.runtime_error",
          name: "0001-some-name",
          error: "TypeError: Cannot read properties of null (reading 'toString')",
        ),
      )
    end

    it "returns a list of objects that each has data representing the migration and the results" do
      results = described_class.new(theme).run
      expect(results[0][:version]).to eq(1)
      expect(results[0][:name]).to eq("some-name")
      expect(results[0][:original_name]).to eq("0001-some-name")
      expect(results[0][:theme_field_id]).to eq(migration_field.id)
      expect(results[0][:settings_before]).to eq({})
      expect(results[0][:settings_after]).to eq({})
    end
  end
end
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Responsible for destroying a User record, with optional cleanup of the
# user's posts, email/IP screening, and staff action logging.
class UserDestroyer
  # Raised when delete_posts is false but the user still has posts in topics.
  class PostsExistError < RuntimeError
  end

  # @param actor [User] the user performing the deletion (permission-checked)
  # @raise [Discourse::InvalidParameters] when actor is nil or not a User
  def initialize(actor)
    @actor = actor
    raise Discourse::InvalidParameters.new("acting user is nil") unless @actor && @actor.is_a?(User)
    @guardian = Guardian.new(actor)
  end

  # Returns false if the user failed to be deleted.
  # Returns a frozen instance of the User if the delete succeeded.
  #
  # Recognized opts (from call sites visible here): :delete_posts,
  # :delete_as_spammer, :block_urls, :block_email, :block_ip, :transaction,
  # :prepare_for_destroy, :quiet, :context.
  def destroy(user, opts = {})
    raise Discourse::InvalidParameters.new("user is nil") unless user && user.is_a?(User)
    raise PostsExistError if !opts[:delete_posts] && user.posts.joins(:topic).count != 0
    @guardian.ensure_can_delete_user!(user)
    # default to using a transaction
    opts[:transaction] = true if opts[:transaction] != false
    prepare_for_destroy(user) if opts[:prepare_for_destroy] == true
    result = nil
    optional_transaction(open_transaction: opts[:transaction]) do
      UserSecurityKey.where(user_id: user.id).delete_all
      Bookmark.where(user_id: user.id).delete_all
      Draft.where(user_id: user.id).delete_all
      Reviewable.where(created_by_id: user.id).delete_all
      # Category description topics are preserved even when deleting posts.
      category_topic_ids = Category.where("topic_id IS NOT NULL").pluck(:topic_id)
      if opts[:delete_posts]
        DiscoursePluginRegistry.user_destroyer_on_content_deletion_callbacks.each do |cb|
          cb.call(user, @guardian, opts)
        end
        agree_with_flags(user) if opts[:delete_as_spammer]
        block_external_urls(user) if opts[:block_urls]
        delete_posts(user, category_topic_ids, opts)
      end
      user.post_actions.find_each { |post_action| post_action.remove_act!(Discourse.system_user) }
      # Add info about the user to staff action logs
      UserHistory.staff_action_records(
        Discourse.system_user,
        acting_user: user.username,
      ).update_all(
        ["details = CONCAT(details, ?)", "\nuser_id: #{user.id}\nusername: #{user.username}"],
      )
      # keep track of emails used
      user_emails = user.user_emails.pluck(:email)
      if result = user.destroy
        if opts[:block_email]
          user_emails.each do |email|
            ScreenedEmail.block(email, ip_address: result.ip_address)&.record_match!
          end
        end
        if opts[:block_ip] && result.ip_address
          ScreenedIpAddress.watch(result.ip_address)&.record_match!
          if result.registration_ip_address && result.ip_address != result.registration_ip_address
            ScreenedIpAddress.watch(result.registration_ip_address)&.record_match!
          end
        end
        # Orphan any remaining (e.g. previously deleted) posts.
        Post.unscoped.where(user_id: result.id).update_all(user_id: nil)
        # If this user created categories, fix those up:
        Category
          .where(user_id: result.id)
          .each do |c|
            c.user_id = Discourse::SYSTEM_USER_ID
            c.save!
            if topic = Topic.unscoped.find_by(id: c.topic_id)
              topic.recover!
              topic.user_id = Discourse::SYSTEM_USER_ID
              topic.save!
            end
          end
        Invite
          .where(email: user_emails)
          .each do |invite|
            # invited_users will be removed by dependent destroy association when user is destroyed
            invite.invited_groups.destroy_all
            invite.topic_invites.destroy_all
            invite.destroy
          end
        unless opts[:quiet]
          # Self-deletion is logged by the system user with a special context.
          if @actor == user
            deleted_by = Discourse.system_user
            opts[:context] = I18n.t("staff_action_logs.user_delete_self", url: opts[:context])
          else
            deleted_by = @actor
          end
          StaffActionLogger.new(deleted_by).log_user_deletion(user, opts.slice(:context))
          if opts.slice(:context).blank?
            Rails.logger.warn("User destroyed without context from: #{caller_locations(14, 1)[0]}")
          end
        end
        MessageBus.publish "/logout/#{result.id}", result.id, user_ids: [result.id]
      end
    end
    # After the user is deleted, remove the reviewable
    if reviewable = ReviewableUser.pending.find_by(target: user)
      reviewable.perform(@actor, :delete_user)
    end
    result
  end

  protected

  # Adds the user's external (oneboxable generic) topic links to the screened
  # URLs list so they can be blocked in future posts.
  def block_external_urls(user)
    TopicLink
      .where(user: user, internal: false)
      .find_each do |link|
        next if Oneboxer.engine(link.url) != Onebox::Engine::AllowlistedGenericOnebox
        ScreenedUrl.watch(link.url, link.domain, ip_address: user.ip_address)&.record_match!
      end
  end

  # Resolves outstanding reviewables against the user's content: agrees with
  # flags and rejects pending queued posts, where those actions are available.
  def agree_with_flags(user)
    ReviewableFlaggedPost
      .where(target_created_by: user)
      .find_each do |reviewable|
        if reviewable.actions_for(@guardian).has?(:agree_and_keep)
          reviewable.perform(@actor, :agree_and_keep)
        end
      end
    ReviewablePost
      .where(target_created_by: user)
      .find_each do |reviewable|
        if reviewable.actions_for(@guardian).has?(:reject_and_delete)
          reviewable.perform(@actor, :reject_and_delete)
        end
      end
  end

  # Destroys the user's posts, except first posts of category description
  # topics, which are reassigned to the system user instead.
  def delete_posts(user, category_topic_ids, opts)
    user.posts.find_each do |post|
      if post.is_first_post? && category_topic_ids.include?(post.topic_id)
        post.update!(user: Discourse.system_user)
      else
        PostDestroyer.new(@actor.staff? ? @actor : Discourse.system_user, post).destroy
      end
      if post.topic && post.is_first_post?
        Topic.unscoped.where(id: post.topic_id).update_all(user_id: nil)
      end
    end
  end

  # Bulk-deletes per-user activity rows ahead of destruction (opt-in via
  # opts[:prepare_for_destroy]); avoids slow dependent-destroy callbacks.
  def prepare_for_destroy(user)
    PostAction.where(user_id: user.id).delete_all
    UserAction.where(
      "user_id = :user_id OR target_user_id = :user_id OR acting_user_id = :user_id",
      user_id: user.id,
    ).delete_all
    PostTiming.where(user_id: user.id).delete_all
    TopicViewItem.where(user_id: user.id).delete_all
    TopicUser.where(user_id: user.id).delete_all
    TopicAllowedUser.where(user_id: user.id).delete_all
    Notification.where(user_id: user.id).delete_all
  end

  # Yields inside a DB transaction unless open_transaction is false.
  def optional_transaction(open_transaction: true)
    if open_transaction
      User.transaction { yield }
    else
      yield
    end
  end
end
``` | # frozen_string_literal: true
RSpec.describe UserDestroyer do
fab!(:user) { Fabricate(:user_with_secondary_email) }
fab!(:admin)
describe ".new" do
it "raises an error when user is nil" do
expect { UserDestroyer.new(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "raises an error when user is not a User" do
expect { UserDestroyer.new(5) }.to raise_error(Discourse::InvalidParameters)
end
end
describe "#destroy" do
it "raises an error when user is nil" do
expect { UserDestroyer.new(admin).destroy(nil) }.to raise_error(Discourse::InvalidParameters)
end
it "raises an error when user is not a User" do
expect { UserDestroyer.new(admin).destroy("nothing") }.to raise_error(
Discourse::InvalidParameters,
)
end
it "raises an error when regular user tries to delete another user" do
expect { UserDestroyer.new(user).destroy(Fabricate(:user)) }.to raise_error(
Discourse::InvalidAccess,
)
end
shared_examples "successfully destroy a user" do
it "should delete the user" do
expect { destroy }.to change { User.count }.by(-1)
end
it "should return the deleted user record" do
return_value = destroy
expect(return_value).to eq(user)
expect(return_value).to be_destroyed
end
it "should log the action" do
StaffActionLogger.any_instance.expects(:log_user_deletion).with(user, anything).once
destroy
end
it "should not log the action if quiet is true" do
expect {
UserDestroyer.new(admin).destroy(user, destroy_opts.merge(quiet: true))
}.to_not change { UserHistory.where(action: UserHistory.actions[:delete_user]).count }
end
it "triggers a extensibility event" do
event = DiscourseEvent.track_events { destroy }.last
expect(event[:event_name]).to eq(:user_destroyed)
expect(event[:params].first).to eq(user)
end
end
shared_examples "email block list" do
it "doesn't add email to block list by default" do
ScreenedEmail.expects(:block).never
destroy
end
it "adds emails to block list if block_email is true" do
expect {
UserDestroyer.new(admin).destroy(user, destroy_opts.merge(block_email: true))
}.to change { ScreenedEmail.count }.by(2)
end
end
context "when user deletes self" do
subject(:destroy) { UserDestroyer.new(user).destroy(user, destroy_opts) }
let(:destroy_opts) { { delete_posts: true, context: "/u/username/preferences/account" } }
include_examples "successfully destroy a user"
it "should log proper context" do
destroy
expect(UserHistory.where(action: UserHistory.actions[:delete_user]).last.context).to eq(
I18n.t("staff_action_logs.user_delete_self", url: "/u/username/preferences/account"),
)
end
end
context "when context is missing" do
it "logs warning message if context is missing" do
logger = track_log_messages { UserDestroyer.new(admin).destroy(user) }
expect(logger.warnings).to include(/User destroyed without context from:/)
end
end
context "with a reviewable post" do
let!(:reviewable) { Fabricate(:reviewable, created_by: user) }
it "removes the queued post" do
UserDestroyer.new(admin).destroy(user)
expect(Reviewable.where(created_by_id: user.id).count).to eq(0)
end
end
context "with a reviewable user" do
let(:reviewable) { Fabricate(:reviewable, created_by: admin) }
it "sets the reviewable user as rejected" do
UserDestroyer.new(admin).destroy(reviewable.target)
expect(reviewable.reload).to be_rejected
end
end
context "with a directory item record" do
it "removes the directory item" do
DirectoryItem.create!(
user: user,
period_type: 1,
likes_received: 0,
likes_given: 0,
topics_entered: 0,
topic_count: 0,
post_count: 0,
)
UserDestroyer.new(admin).destroy(user)
expect(DirectoryItem.where(user_id: user.id).count).to eq(0)
end
end
context "with a draft" do
let!(:draft) { Draft.set(user, "test", 0, "test") }
it "removed the draft" do
UserDestroyer.new(admin).destroy(user)
expect(Draft.where(user_id: user.id).count).to eq(0)
end
end
context "when user has posts" do
let!(:topic_starter) { Fabricate(:user) }
let!(:topic) { Fabricate(:topic, user: topic_starter) }
let!(:first_post) { Fabricate(:post, user: topic_starter, topic: topic) }
let!(:post) { Fabricate(:post, user: user, topic: topic) }
context "when delete_posts is false" do
subject(:destroy) { UserDestroyer.new(admin).destroy(user) }
before do
user.stubs(:post_count).returns(1)
user.stubs(:first_post_created_at).returns(Time.zone.now)
end
it "should raise the right error" do
StaffActionLogger.any_instance.expects(:log_user_deletion).never
expect { destroy }.to raise_error(UserDestroyer::PostsExistError)
expect(user.reload.id).to be_present
end
end
context "when delete_posts is true" do
let(:destroy_opts) { { delete_posts: true } }
context "when staff deletes user" do
subject(:destroy) { UserDestroyer.new(admin).destroy(user, destroy_opts) }
include_examples "successfully destroy a user"
include_examples "email block list"
it "deletes the posts" do
destroy
expect(post.reload.deleted_at).not_to eq(nil)
expect(post.user_id).to eq(nil)
end
it "does not delete topics started by others in which the user has replies" do
destroy
expect(topic.reload.deleted_at).to eq(nil)
expect(topic.user_id).not_to eq(nil)
end
it "deletes topics started by the deleted user" do
spammer_topic = Fabricate(:topic, user: user)
Fabricate(:post, user: user, topic: spammer_topic)
destroy
expect(spammer_topic.reload.deleted_at).not_to eq(nil)
expect(spammer_topic.user_id).to eq(nil)
end
context "when delete_as_spammer is true" do
before { destroy_opts[:delete_as_spammer] = true }
it "approves reviewable flags" do
spammer_post = Fabricate(:post, user: user)
reviewable = PostActionCreator.inappropriate(admin, spammer_post).reviewable
expect(reviewable).to be_pending
destroy
reviewable.reload
expect(reviewable).to be_approved
end
it "rejects pending posts" do
post = Fabricate(:post, user: user)
reviewable =
Fabricate(
:reviewable,
type: "ReviewablePost",
target_type: "Post",
target_id: post.id,
created_by: Discourse.system_user,
target_created_by: user,
)
expect(reviewable).to be_pending
destroy
reviewable.reload
expect(reviewable).to be_rejected
end
end
end
context "when users deletes self" do
subject(:destroy) { UserDestroyer.new(user).destroy(user, destroy_opts) }
include_examples "successfully destroy a user"
include_examples "email block list"
it "deletes the posts" do
destroy
expect(post.reload.deleted_at).not_to eq(nil)
expect(post.user_id).to eq(nil)
end
end
end
end
context "when user was invited" do
it "should delete the invite of user" do
invite = Fabricate(:invite)
topic_invite = invite.topic_invites.create!(topic: Fabricate(:topic))
invited_group = invite.invited_groups.create!(group: Fabricate(:group))
user = Fabricate(:user)
user.user_emails.create!(email: invite.email)
UserDestroyer.new(admin).destroy(user)
expect(Invite.exists?(invite.id)).to eq(false)
expect(InvitedGroup.exists?(invited_group.id)).to eq(false)
expect(TopicInvite.exists?(topic_invite.id)).to eq(false)
end
end
context "when user created category" do
let!(:topic) { Fabricate(:topic, user: user) }
let!(:first_post) { Fabricate(:post, user: user, topic: topic) }
let!(:second_post) { Fabricate(:post, user: user, topic: topic) }
let!(:category) { Fabricate(:category, user: user, topic_id: topic.id) }
it "changes author of first category post to system user and still deletes second post" do
UserDestroyer.new(admin).destroy(user, delete_posts: true)
expect(first_post.reload.deleted_at).to eq(nil)
expect(first_post.user_id).to eq(Discourse.system_user.id)
expect(second_post.reload.deleted_at).not_to eq(nil)
expect(second_post.user_id).to eq(nil)
end
end
# Out-of-sync user_stat counters must not block self-deletion.
context "when user has no posts, but user_stats table has post_count > 0" do
subject(:destroy) { UserDestroyer.new(user).destroy(user, delete_posts: false) }
let(:destroy_opts) { {} }
before do
# out of sync user_stat data shouldn't break UserDestroyer
user.user_stat.update_attribute(:post_count, 1)
end
include_examples "successfully destroy a user"
end
# Already-deleted posts get their user_id nulled rather than reassigned.
context "when user has deleted posts" do
let!(:deleted_post) { Fabricate(:post, user: user, deleted_at: 1.hour.ago) }
it "should mark the user's deleted posts as belonging to a nuked user" do
expect { UserDestroyer.new(admin).destroy(user) }.to change { User.count }.by(-1)
expect(deleted_post.reload.user_id).to eq(nil)
end
end
context "when user has no posts" do
context "when destroy succeeds" do
subject(:destroy) { UserDestroyer.new(admin).destroy(user) }
let(:destroy_opts) { {} }
include_examples "successfully destroy a user"
include_examples "email block list"
end
context "when destroy fails" do
subject(:destroy) { UserDestroyer.new(admin).destroy(user) }
it "should not log the action" do
user.stubs(:destroy).returns(false)
StaffActionLogger.any_instance.expects(:log_user_deletion).never
destroy
end
end
end
# ScreenedUrl records are only added for external, non-oneboxed links,
# and only when :block_urls is passed.
context "when user has posts with links" do
context "with external links" do
before do
@post = Fabricate(:post_with_external_links, user: user)
TopicLink.extract_from(@post)
end
it "doesn't add ScreenedUrl records by default" do
ScreenedUrl.expects(:watch).never
UserDestroyer.new(admin).destroy(user, delete_posts: true)
end
it "adds ScreenedUrl records when :block_urls is true" do
ScreenedUrl.expects(:watch).with(anything, anything, has_key(:ip_address)).at_least_once
UserDestroyer.new(admin).destroy(user, delete_posts: true, block_urls: true)
end
end
context "with internal links" do
before do
@post = Fabricate(:post_with_external_links, user: user)
TopicLink.extract_from(@post)
TopicLink.where(user: user).update_all(internal: true)
end
it "doesn't add ScreenedUrl records" do
ScreenedUrl.expects(:watch).never
UserDestroyer.new(admin).destroy(user, delete_posts: true, block_urls: true)
end
end
context "with oneboxed links" do
before do
@post = Fabricate(:post_with_youtube, user: user)
TopicLink.extract_from(@post)
end
it "doesn't add ScreenedUrl records" do
ScreenedUrl.expects(:watch).never
UserDestroyer.new(admin).destroy(user, delete_posts: true, block_urls: true)
end
end
end
# IP screening is opt-in via :block_ip; both last and registration IPs are watched.
context "with ip address screening" do
it "doesn't create screened_ip_address records by default" do
ScreenedIpAddress.expects(:watch).never
UserDestroyer.new(admin).destroy(user)
end
context "when block_ip is true" do
it "creates a new screened_ip_address record" do
ScreenedIpAddress.expects(:watch).with(user.ip_address).returns(stub_everything)
UserDestroyer.new(admin).destroy(user, block_ip: true)
end
it "creates two new screened_ip_address records when registration_ip_address is different than last ip_address" do
user.registration_ip_address = "12.12.12.12"
ScreenedIpAddress.expects(:watch).with(user.ip_address).returns(stub_everything)
ScreenedIpAddress
.expects(:watch)
.with(user.registration_ip_address)
.returns(stub_everything)
UserDestroyer.new(admin).destroy(user, block_ip: true)
end
end
end
# Categories (and their definition topics) survive, reassigned to the system user.
context "when user created a category" do
let!(:category) { Fabricate(:category_with_definition, user: user) }
it "assigns the system user to the categories" do
UserDestroyer.new(admin).destroy(user, delete_posts: true)
expect(category.reload.user_id).to eq(Discourse.system_user.id)
expect(category.topic).to be_present
expect(category.topic.user_id).to eq(Discourse.system_user.id)
end
end
describe "Destroying a user with security key" do
let!(:security_key) { Fabricate(:user_security_key_with_random_credential, user: user) }
it "removes the security key" do
UserDestroyer.new(admin).destroy(user)
expect(UserSecurityKey.where(user_id: user.id).count).to eq(0)
end
end
describe "Destroying a user with a bookmark" do
let!(:bookmark) { Fabricate(:bookmark, user: user) }
it "removes the bookmark" do
UserDestroyer.new(admin).destroy(user)
expect(Bookmark.where(user_id: user.id).count).to eq(0)
end
end
# Likes given by the user are removed and like counters are corrected.
context "when user liked things" do
before do
@topic = Fabricate(:topic, user: Fabricate(:user))
@post = Fabricate(:post, user: @topic.user, topic: @topic)
PostActionCreator.like(user, @post)
end
it "should destroy the like" do
expect { UserDestroyer.new(admin).destroy(user, delete_posts: true) }.to change {
PostAction.count
}.by(-1)
expect(@post.reload.like_count).to eq(0)
end
end
# Trust-level-granting group memberships must not block deletion.
context "when user belongs to groups that grant trust level" do
let(:group) { Fabricate(:group, grant_trust_level: 4) }
before { group.add(user) }
it "can delete the user" do
d = UserDestroyer.new(admin)
expect { d.destroy(user) }.to change { User.count }.by(-1)
end
it "can delete the user if they have a manual locked trust level and have no email" do
user.update(manual_locked_trust_level: 3)
UserEmail.where(user: user).delete_all
user.reload
expect { UserDestroyer.new(admin).destroy(user) }.to change { User.count }.by(-1)
end
it "can delete the user if they were to fall into another trust level and have no email" do
g2 = Fabricate(:group, grant_trust_level: 1)
g2.add(user)
UserEmail.where(user: user).delete_all
user.reload
expect { UserDestroyer.new(admin).destroy(user) }.to change { User.count }.by(-1)
end
end
# Staff action logs are retained; the deleted user's identity is appended
# to each entry's details.
context "when user has staff action logs" do
before do
logger = StaffActionLogger.new(user)
logger.log_site_setting_change(
"site_description",
"Our friendly community",
"My favourite community",
)
logger.log_site_setting_change(
"site_description",
"Our friendly community",
"My favourite community",
details: "existing details",
)
end
it "should keep the staff action log and add the username" do
username = user.username
ids =
UserHistory.staff_action_records(Discourse.system_user, acting_user: username).map(&:id)
UserDestroyer.new(admin).destroy(user, delete_posts: true)
details = UserHistory.where(id: ids).map(&:details)
expect(details).to contain_exactly(
"\nuser_id: #{user.id}\nusername: #{username}",
"existing details\nuser_id: #{user.id}\nusername: #{username}",
)
end
end
# Email logs are retained after the user is destroyed.
context "when user got an email" do
let!(:email_log) { Fabricate(:email_log, user: user) }
it "does not delete the email log" do
expect { UserDestroyer.new(admin).destroy(user, delete_posts: true) }.to_not change {
EmailLog.count
}
end
end
end
end
|
Write an RSpec test file for the following Ruby class:
```ruby
# frozen_string_literal: true
# Validates an external domain against an allowed domain pattern that may
# start with a single leading wildcard (e.g. "*.discourse.org").
module WildcardDomainChecker
  # Checks whether +external_domain+ matches +domain+.
  #
  # When +domain+ starts with "*", the leading (escaped) "*" is replaced by
  # \S* so the wildcard matches any run of non-whitespace characters; any
  # other domain must match literally. The match is case-insensitive and
  # anchored to the whole string (\A..\z), so suffix tricks like
  # "good.example.evil.com" or embedded newlines cannot sneak past.
  #
  # @param domain [String] allowed domain, optionally starting with "*"
  # @param external_domain [String] domain to validate
  # @return [MatchData, nil] the match data, or nil when it does not match
  def self.check_domain(domain, external_domain)
    escaped_domain =
      domain[0] == "*" ? Regexp.escape(domain).sub("\\*", '\S*') : Regexp.escape(domain)

    # Regexp::IGNORECASE instead of the legacy string options argument ("i");
    # passing a non-Integer/non-nil second argument is deprecated since Ruby 3.2.
    domain_regex = Regexp.new("\\A#{escaped_domain}\\z", Regexp::IGNORECASE)
    external_domain.match(domain_regex)
  end
end
``` | # frozen_string_literal: true
# Specs for WildcardDomainChecker.check_domain: matching returns MatchData
# (index [0] is the full matched string), non-matching returns nil.
RSpec.describe WildcardDomainChecker do
describe ".check_domain" do
context "when domain is valid" do
it "returns correct domain" do
# Leading wildcard matches any non-whitespace prefix, across many labels.
result1 =
WildcardDomainChecker.check_domain(
"*.discourse.org",
"anything.is.possible.discourse.org",
)
expect(result1[0]).to eq("anything.is.possible.discourse.org")
# Non-wildcard domains must match literally.
result2 = WildcardDomainChecker.check_domain("www.discourse.org", "www.discourse.org")
expect(result2[0]).to eq("www.discourse.org")
# A bare "*" matches any domain.
result3 = WildcardDomainChecker.check_domain("*", "hello.discourse.org")
expect(result3[0]).to eq("hello.discourse.org")
end
end
context "when domain is invalid" do
it "doesn't return the domain" do
# Anchoring rejects look-alike domains with an evil suffix.
result1 =
WildcardDomainChecker.check_domain("*.discourse.org", "bad-domain.discourse.org.evil.com")
expect(result1).to eq(nil)
result2 =
WildcardDomainChecker.check_domain("www.discourse.org", "www.discourse.org.evil.com")
expect(result2).to eq(nil)
result3 = WildcardDomainChecker.check_domain("www.discourse.org", "www.www.discourse.org")
expect(result3).to eq(nil)
# "*" is only treated as a wildcard in the first position.
result4 = WildcardDomainChecker.check_domain("www.*.discourse.org", "www.www.discourse.org")
expect(result4).to eq(nil)
# \A/\z anchors reject newline smuggling (a ^/$ regex would accept this).
result5 =
WildcardDomainChecker.check_domain(
"www.discourse.org",
"www.discourse.org\nwww.discourse.org.evil.com",
)
expect(result5).to eq(nil)
end
end
end
end
|
Write an RSpec test file for the following Ruby class:
```ruby
# frozen_string_literal: true
# GroupMessage sends a private message to a group.
# It will also avoid sending the same message repeatedly, which can happen with
# notifications to moderators when spam is detected.
#
# Options:
#
# user: (User) If the message is about a user, pass the user object.
# limit_once_per: (seconds) Limit sending the given type of message once every X seconds.
# The default is 24 hours. Set to false to always send the message.
class GroupMessage
# Needed for user_path in message_params.
include Rails.application.routes.url_helpers
# Only messages newer than this are candidates for delete_previous!.
RECENT_MESSAGE_PERIOD = 3.months
# Convenience wrapper: build the message and send it immediately.
def self.create(group_name, message_type, opts = {})
GroupMessage.new(group_name, message_type, opts).create
end
# @param group_name [String] target group
# @param message_type [Symbol, String] key under I18n "system_messages.*"
# @param opts [Hash] :user, :limit_once_per, :message_params (see class docs)
def initialize(group_name, message_type, opts = {})
@group_name = group_name
@message_type = message_type
@opts = opts
end
# Sends the PM as the system user unless the same message type was sent
# recently; returns false when rate-limited, otherwise PostCreator's result.
# NOTE(review): remember_message_sent runs even when PostCreator.create
# returns a falsy/failed result — confirm that is intended.
def create
return false if sent_recently?
post =
PostCreator.create(
Discourse.system_user,
target_group_names: [@group_name],
archetype: Archetype.private_message,
subtype: TopicSubtype.system_message,
title: I18n.t("system_messages.#{@message_type}.subject_template", message_params),
raw: I18n.t("system_messages.#{@message_type}.text_body_template", message_params),
)
remember_message_sent
post
end
# Destroys previously sent copies of this message type: single-post system
# PMs to the group, created within RECENT_MESSAGE_PERIOD, whose title (and,
# when match_raw, body) matches the current I18n templates.
def delete_previous!(respect_sent_recently: true, match_raw: true)
return false if respect_sent_recently && sent_recently?
posts =
Post
.joins(topic: { topic_allowed_groups: :group })
.where(
topic: {
posts_count: 1,
user_id: Discourse.system_user,
archetype: Archetype.private_message,
subtype: TopicSubtype.system_message,
title: I18n.t("system_messages.#{@message_type}.subject_template", message_params),
topic_allowed_groups: {
groups: {
name: @group_name,
},
},
},
)
.where("posts.created_at > ?", RECENT_MESSAGE_PERIOD.ago)
if match_raw
posts =
posts.where(
raw: I18n.t("system_messages.#{@message_type}.text_body_template", message_params).rstrip,
)
end
posts.find_each { |post| PostDestroyer.new(Discourse.system_user, post).destroy }
end
# Interpolation params for the I18n templates; memoized. Adds username and
# profile URL when a :user was supplied.
def message_params
@message_params ||=
begin
h = { base_url: Discourse.base_url }.merge(@opts[:message_params] || {})
if @opts[:user]
h.merge!(username: @opts[:user].username, user_url: user_path(@opts[:user].username))
end
h
end
end
# True when the rate-limit key is still present in Redis; always false when
# rate limiting was explicitly disabled via limit_once_per: false.
def sent_recently?
return false if @opts[:limit_once_per] == false
Discourse.redis.get(sent_recently_key).present?
end
# default is to send no more than once every 24 hours (24 * 60 * 60 = 86,400 seconds)
def remember_message_sent
unless @opts[:limit_once_per] == false
Discourse.redis.setex(sent_recently_key, @opts[:limit_once_per].try(:to_i) || 86_400, 1)
end
end
# Redis key scoping the rate limit per group, message type, and (optional) user.
def sent_recently_key
"grpmsg:#{@group_name}:#{@message_type}:#{@opts[:user] ? @opts[:user].username : ""}"
end
end
``` | # frozen_string_literal: true
# Specs for GroupMessage: message creation, Redis-backed rate limiting
# (sent_recently?/remember_message_sent), and I18n message params.
RSpec.describe GroupMessage do
subject(:send_group_message) do
GroupMessage.create(moderators_group, :user_automatically_silenced, user: user)
end
let(:moderators_group) { Group[:moderators].name }
let!(:admin) { Fabricate.build(:admin, id: 999) }
let!(:user) { Fabricate.build(:user, id: 111) }
before { Discourse.stubs(:system_user).returns(admin) }
# Rate limit not triggered: the PM is created via PostCreator.
describe "not sent recently" do
before { GroupMessage.any_instance.stubs(:sent_recently?).returns(false) }
it "should send a private message to the given group" do
PostCreator
.expects(:create)
.with do |from_user, opts|
from_user.id == (admin.id) && opts[:target_group_names] &&
opts[:target_group_names].include?(Group[:moderators].name) &&
opts[:archetype] == (Archetype.private_message) && opts[:title].present? &&
opts[:raw].present?
end
.returns(stub_everything)
send_group_message
end
it "returns whatever PostCreator returns" do
the_output = stub_everything
PostCreator.stubs(:create).returns(the_output)
expect(send_group_message).to eq(the_output)
end
it "remembers that it was sent so it doesn't spam the group with the same message" do
PostCreator.stubs(:create).returns(stub_everything)
GroupMessage.any_instance.expects(:remember_message_sent)
send_group_message
end
end
# Rate limit triggered: create returns false and no PM is sent.
describe "sent recently" do
subject(:group_message) do
GroupMessage.create(moderators_group, :user_automatically_silenced, user: user)
end
before { GroupMessage.any_instance.stubs(:sent_recently?).returns(true) }
it { is_expected.to eq(false) }
it "should not send the same notification again" do
PostCreator.expects(:create).never
group_message
end
end
# message_params must expose the username and profile URL for any message type.
describe "message_params" do
let(:user) { Fabricate.build(:user, id: 123_123) }
shared_examples "common message params for group messages" do
it "returns the correct params" do
expect(message_params[:username]).to eq(user.username)
expect(message_params[:user_url]).to be_present
end
end
context "with user_automatically_silenced" do
subject(:message_params) do
GroupMessage.new(moderators_group, :user_automatically_silenced, user: user).message_params
end
include_examples "common message params for group messages"
end
context "with spam_post_blocked" do
subject(:message_params) do
GroupMessage.new(moderators_group, :spam_post_blocked, user: user).message_params
end
include_examples "common message params for group messages"
end
end
# Redis-backed helpers; the key itself is stubbed so only get/setex matter.
describe "methods that use redis" do
subject(:group_message) do
GroupMessage.new(moderators_group, :user_automatically_silenced, user: user)
end
let(:user) { Fabricate.build(:user, id: 123_123) }
before do
PostCreator.stubs(:create).returns(stub_everything)
group_message.stubs(:sent_recently_key).returns("the_key")
end
describe "sent_recently?" do
it "returns true if redis says so" do
Discourse.redis.stubs(:get).with(group_message.sent_recently_key).returns("1")
expect(group_message.sent_recently?).to be_truthy
end
it "returns false if redis returns nil" do
Discourse.redis.stubs(:get).with(group_message.sent_recently_key).returns(nil)
expect(group_message.sent_recently?).to be_falsey
end
it "always returns false if limit_once_per is false" do
gm =
GroupMessage.new(
moderators_group,
:user_automatically_silenced,
user: user,
limit_once_per: false,
)
gm.stubs(:sent_recently_key).returns("the_key")
Discourse.redis.stubs(:get).with(gm.sent_recently_key).returns("1")
expect(gm.sent_recently?).to be_falsey
end
end
describe "remember_message_sent" do
it "stores a key in redis that expires after 24 hours" do
Discourse
.redis
.expects(:setex)
.with(group_message.sent_recently_key, 24 * 60 * 60, anything)
.returns("OK")
group_message.remember_message_sent
end
it "can use a given expiry time" do
Discourse.redis.expects(:setex).with(anything, 30 * 60, anything).returns("OK")
GroupMessage.new(
moderators_group,
:user_automatically_silenced,
user: user,
limit_once_per: 30.minutes,
).remember_message_sent
end
it "can be disabled" do
Discourse.redis.expects(:setex).never
GroupMessage.new(
moderators_group,
:user_automatically_silenced,
user: user,
limit_once_per: false,
).remember_message_sent
end
end
end
end
|
Write an RSpec test file for the following Ruby class:
```ruby
# frozen_string_literal: true
# Merges one user account into another: reassigns content, folds together
# per-day/per-row stats, then destroys the source account. Progress is
# streamed to @acting_user (when given) over the "/merge_user" MessageBus
# channel.
class UserMerger
# @param source_user [User] account to be merged away and deleted
# @param target_user [User] surviving account
# @param acting_user [User, nil] staff member to stream progress to
def initialize(source_user, target_user, acting_user = nil)
@source_user = source_user
@target_user = target_user
@acting_user = acting_user
# NOTE(review): @user_id appears unused within this class — confirm.
@user_id = source_user.id
# Captured before delete_source_user scrambles the source email; used in log_merge.
@source_primary_email = source_user.email
end
# Runs the full merge pipeline in order; the order matters (e.g. rows are
# moved to the target before per-row stats are recomputed). Returns the
# reloaded target user.
def merge!
update_username
move_posts
update_user_ids
merge_given_daily_likes
merge_post_timings
merge_user_visits
update_site_settings
merge_user_attributes
DiscourseEvent.trigger(:merging_users, @source_user, @target_user)
update_user_stats
delete_source_user
log_merge
@target_user.reload
end
protected
# Rewrites the source username to the target's (mentions, quotes, etc.)
# via UsernameChanger, synchronously.
def update_username
return if @source_user.username == @target_user.username
if @acting_user
::MessageBus.publish "/merge_user",
{ message: I18n.t("admin.user.merge_user.updating_username") },
user_ids: [@acting_user.id]
end
UsernameChanger.update_username(
user_id: @source_user.id,
old_username: @source_user.username,
new_username: @target_user.username,
avatar_template: @target_user.avatar_template,
asynchronous: false,
)
end
# Transfers all of the source user's posts (including deleted ones) to the
# target, batching PostOwnerChanger calls per topic.
def move_posts
posts =
Post
.with_deleted
.where(user_id: @source_user.id)
.order(:topic_id, :post_number)
.pluck(:topic_id, :id)
return if posts.count == 0
if @acting_user
::MessageBus.publish "/merge_user",
{ message: I18n.t("admin.user.merge_user.changing_post_ownership") },
user_ids: [@acting_user.id]
end
# Posts are sorted by topic; flush the batch whenever the topic changes.
last_topic_id = nil
post_ids = []
posts.each do |current_topic_id, current_post_id|
if last_topic_id != current_topic_id && post_ids.any?
change_post_owner(last_topic_id, post_ids)
post_ids = []
end
last_topic_id = current_topic_id
post_ids << current_post_id
end
change_post_owner(last_topic_id, post_ids) if post_ids.any?
end
# Hands one topic's worth of post ids to PostOwnerChanger as the system user.
def change_post_owner(topic_id, post_ids)
PostOwnerChanger.new(
topic_id: topic_id,
post_ids: post_ids,
new_owner: @target_user,
acting_user: Discourse.system_user,
skip_revision: true,
).change_owner!
end
# Recomputes given_daily_likes for the target on every date the source had
# an entry, counting the target's post_actions (which already include the
# rows moved over by update_user_ids).
# NOTE(review): :action_type_id is bound but never referenced in the SQL,
# so the count is not filtered to likes — confirm against upstream.
def merge_given_daily_likes
if @acting_user
::MessageBus.publish "/merge_user",
{ message: I18n.t("admin.user.merge_user.merging_given_daily_likes") },
user_ids: [@acting_user.id]
end
sql = <<~SQL
INSERT INTO given_daily_likes AS g (user_id, likes_given, given_date, limit_reached)
SELECT
:target_user_id AS user_id,
COUNT(1) AS likes_given,
a.created_at::DATE AS given_date,
COUNT(1) >= :max_likes_per_day AS limit_reached
FROM post_actions AS a
WHERE a.user_id = :target_user_id
AND a.deleted_at IS NULL
AND EXISTS(
SELECT 1
FROM given_daily_likes AS g
WHERE g.user_id = :source_user_id AND a.created_at::DATE = g.given_date
)
GROUP BY given_date
ON CONFLICT (user_id, given_date)
DO UPDATE
SET likes_given = EXCLUDED.likes_given,
limit_reached = EXCLUDED.limit_reached
SQL
DB.exec(
sql,
source_user_id: @source_user.id,
target_user_id: @target_user.id,
max_likes_per_day: SiteSetting.max_likes_per_day,
action_type_id: PostActionType.types[:like],
)
end
# Moves non-conflicting post_timings to the target, then adds the remaining
# source msecs into the target's rows, clamped to the INT4 maximum.
def merge_post_timings
if @acting_user
::MessageBus.publish "/merge_user",
{ message: I18n.t("admin.user.merge_user.merging_post_timings") },
user_ids: [@acting_user.id]
end
update_user_id(
:post_timings,
conditions: ["x.topic_id = y.topic_id", "x.post_number = y.post_number"],
)
sql = <<~SQL
UPDATE post_timings AS t
SET msecs = LEAST(t.msecs::bigint + s.msecs, 2^31 - 1)
FROM post_timings AS s
WHERE t.user_id = :target_user_id AND s.user_id = :source_user_id
AND t.topic_id = s.topic_id AND t.post_number = s.post_number
SQL
DB.exec(sql, source_user_id: @source_user.id, target_user_id: @target_user.id)
end
# Moves non-conflicting user_visits to the target, then sums the remaining
# same-day visits into the target's rows.
def merge_user_visits
if @acting_user
::MessageBus.publish "/merge_user",
{ message: I18n.t("admin.user.merge_user.merging_user_visits") },
user_ids: [@acting_user.id]
end
update_user_id(:user_visits, conditions: "x.visited_at = y.visited_at")
sql = <<~SQL
UPDATE user_visits AS t
SET posts_read = t.posts_read + s.posts_read,
mobile = t.mobile OR s.mobile,
time_read = t.time_read + s.time_read
FROM user_visits AS s
WHERE t.user_id = :target_user_id AND s.user_id = :source_user_id
AND t.visited_at = s.visited_at
SQL
DB.exec(sql, source_user_id: @source_user.id, target_user_id: @target_user.id)
end
# Rewrites any username-typed site setting that still points at the source
# user (including hidden settings), logging each change.
def update_site_settings
if @acting_user
::MessageBus.publish "/merge_user",
{ message: I18n.t("admin.user.merge_user.updating_site_settings") },
user_ids: [@acting_user.id]
end
SiteSetting
.all_settings(include_hidden: true)
.each do |setting|
if setting[:type] == "username" && setting[:value] == @source_user.username
SiteSetting.set_and_log(setting[:setting], @target_user.username)
end
end
end
# Recomputes the target's user_stats from the (already merged) underlying
# tables, then folds in the source's additive/min-style counters.
def update_user_stats
if @acting_user
::MessageBus.publish "/merge_user",
{ message: I18n.t("admin.user.merge_user.updating_user_stats") },
user_ids: [@acting_user.id]
end
# topics_entered
DB.exec(<<~SQL, target_user_id: @target_user.id)
UPDATE user_stats
SET topics_entered = (
SELECT COUNT(topic_id)
FROM topic_views
WHERE user_id = :target_user_id
)
WHERE user_id = :target_user_id
SQL
# time_read and days_visited
DB.exec(<<~SQL, target_user_id: @target_user.id)
UPDATE user_stats
SET time_read = COALESCE(x.time_read, 0),
days_visited = COALESCE(x.days_visited, 0)
FROM (
SELECT
SUM(time_read) AS time_read,
COUNT(1) AS days_visited
FROM user_visits
WHERE user_id = :target_user_id
) AS x
WHERE user_id = :target_user_id
SQL
# posts_read_count
# NOTE(review): the EXISTS subquery is not correlated with pt.topic_id —
# as written it counts every timing row whenever any regular topic exists;
# verify (upstream correlates t.id = pt.topic_id).
DB.exec(<<~SQL, target_user_id: @target_user.id)
UPDATE user_stats
SET posts_read_count = (
SELECT COUNT(1)
FROM post_timings AS pt
WHERE pt.user_id = :target_user_id AND EXISTS(
SELECT 1
FROM topics AS t
WHERE t.archetype = 'regular' AND t.deleted_at IS NULL
))
WHERE user_id = :target_user_id
SQL
# likes_given, likes_received, new_since, read_faq, first_post_created_at
DB.exec(<<~SQL, source_user_id: @source_user.id, target_user_id: @target_user.id)
UPDATE user_stats AS t
SET likes_given = t.likes_given + s.likes_given,
likes_received = t.likes_received + s.likes_received,
new_since = LEAST(t.new_since, s.new_since),
read_faq = LEAST(t.read_faq, s.read_faq),
first_post_created_at = LEAST(t.first_post_created_at, s.first_post_created_at)
FROM user_stats AS s
WHERE t.user_id = :target_user_id AND s.user_id = :source_user_id
SQL
end
# Combines scalar columns on users/user_profiles: earliest timestamps,
# greatest trust/seen values, OR'd flags, and COALESCE'd optional fields.
def merge_user_attributes
if @acting_user
::MessageBus.publish "/merge_user",
{ message: I18n.t("admin.user.merge_user.merging_user_attributes") },
user_ids: [@acting_user.id]
end
# NOTE(review): last_posted_at below is derived from last_seen_at on both
# sides — this looks like a copy/paste slip (expected
# GREATEST(t.last_posted_at, s.last_posted_at)); confirm against upstream.
DB.exec(<<~SQL, source_user_id: @source_user.id, target_user_id: @target_user.id)
UPDATE users AS t
SET created_at = LEAST(t.created_at, s.created_at),
updated_at = LEAST(t.updated_at, s.updated_at),
seen_notification_id = GREATEST(t.seen_notification_id, s.seen_notification_id),
last_posted_at = GREATEST(t.last_seen_at, s.last_seen_at),
last_seen_at = GREATEST(t.last_seen_at, s.last_seen_at),
admin = t.admin OR s.admin,
last_emailed_at = GREATEST(t.last_emailed_at, s.last_emailed_at),
trust_level = GREATEST(t.trust_level, s.trust_level),
previous_visit_at = GREATEST(t.previous_visit_at, s.previous_visit_at),
date_of_birth = COALESCE(t.date_of_birth, s.date_of_birth),
ip_address = COALESCE(t.ip_address, s.ip_address),
moderator = t.moderator OR s.moderator,
title = COALESCE(t.title, s.title),
primary_group_id = COALESCE(t.primary_group_id, s.primary_group_id),
registration_ip_address = COALESCE(t.registration_ip_address, s.registration_ip_address),
first_seen_at = LEAST(t.first_seen_at, s.first_seen_at),
manual_locked_trust_level = GREATEST(t.manual_locked_trust_level, s.manual_locked_trust_level)
FROM users AS s
WHERE t.id = :target_user_id AND s.id = :source_user_id
SQL
DB.exec(<<~SQL, source_user_id: @source_user.id, target_user_id: @target_user.id)
UPDATE user_profiles AS t
SET location = COALESCE(t.location, s.location),
website = COALESCE(t.website, s.website),
bio_raw = COALESCE(t.bio_raw, s.bio_raw),
bio_cooked = COALESCE(t.bio_cooked, s.bio_cooked),
bio_cooked_version = COALESCE(t.bio_cooked_version, s.bio_cooked_version),
profile_background_upload_id = COALESCE(t.profile_background_upload_id, s.profile_background_upload_id),
dismissed_banner_key = COALESCE(t.dismissed_banner_key, s.dismissed_banner_key),
granted_title_badge_id = COALESCE(t.granted_title_badge_id, s.granted_title_badge_id),
card_background_upload_id = COALESCE(t.card_background_upload_id, s.card_background_upload_id),
views = t.views + s.views
FROM user_profiles AS s
WHERE t.user_id = :target_user_id AND s.user_id = :source_user_id
SQL
end
# Repoints user-id foreign keys across all remaining tables. Tables with
# uniqueness constraints go through update_user_id (which skips rows that
# would collide with an existing target row); simple tables use update_all.
def update_user_ids
if @acting_user
::MessageBus.publish "/merge_user",
{ message: I18n.t("admin.user.merge_user.updating_user_ids") },
user_ids: [@acting_user.id]
end
Category.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
update_user_id(:category_users, conditions: ["x.category_id = y.category_id"])
update_user_id(:developers)
update_user_id(:draft_sequences, conditions: "x.draft_key = y.draft_key")
update_user_id(:drafts, conditions: "x.draft_key = y.draft_key")
update_user_id(:dismissed_topic_users, conditions: "x.topic_id = y.topic_id")
EmailLog.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
GroupHistory.where(acting_user_id: @source_user.id).update_all(acting_user_id: @target_user.id)
GroupHistory.where(target_user_id: @source_user.id).update_all(target_user_id: @target_user.id)
update_user_id(:group_users, conditions: "x.group_id = y.group_id")
IncomingEmail.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
IncomingLink.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
IncomingLink.where(current_user_id: @source_user.id).update_all(
current_user_id: @target_user.id,
)
InvitedUser.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
Invite
.with_deleted
.where(invited_by_id: @source_user.id)
.update_all(invited_by_id: @target_user.id)
Invite
.with_deleted
.where(deleted_by_id: @source_user.id)
.update_all(deleted_by_id: @target_user.id)
update_user_id(:muted_users, conditions: "x.muted_user_id = y.muted_user_id")
update_user_id(
:muted_users,
user_id_column_name: "muted_user_id",
conditions: "x.user_id = y.user_id",
)
update_user_id(:ignored_users, conditions: "x.ignored_user_id = y.ignored_user_id")
update_user_id(
:ignored_users,
user_id_column_name: "ignored_user_id",
conditions: "x.user_id = y.user_id",
)
Notification.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
update_user_id(
:post_actions,
conditions: [
"x.post_id = y.post_id",
"x.post_action_type_id = y.post_action_type_id",
"x.targets_topic = y.targets_topic",
],
)
PostAction.where(deleted_by_id: @source_user.id).update_all(deleted_by_id: @target_user.id)
PostAction.where(deferred_by_id: @source_user.id).update_all(deferred_by_id: @target_user.id)
PostAction.where(agreed_by_id: @source_user.id).update_all(agreed_by_id: @target_user.id)
PostAction.where(disagreed_by_id: @source_user.id).update_all(disagreed_by_id: @target_user.id)
PostRevision.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
Post
.with_deleted
.where(deleted_by_id: @source_user.id)
.update_all(deleted_by_id: @target_user.id)
Post
.with_deleted
.where(last_editor_id: @source_user.id)
.update_all(last_editor_id: @target_user.id)
Post.with_deleted.where(locked_by_id: @source_user.id).update_all(locked_by_id: @target_user.id)
Post
.with_deleted
.where(reply_to_user_id: @source_user.id)
.update_all(reply_to_user_id: @target_user.id)
Reviewable.where(created_by_id: @source_user.id).update_all(created_by_id: @target_user.id)
ReviewableHistory.where(created_by_id: @source_user.id).update_all(
created_by_id: @target_user.id,
)
SearchLog.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
update_user_id(:tag_users, conditions: "x.tag_id = y.tag_id")
Theme.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
update_user_id(:topic_allowed_users, conditions: "x.topic_id = y.topic_id")
TopicEmbed
.with_deleted
.where(deleted_by_id: @source_user.id)
.update_all(deleted_by_id: @target_user.id)
TopicLink.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
TopicLinkClick.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
TopicTimer
.with_deleted
.where(deleted_by_id: @source_user.id)
.update_all(deleted_by_id: @target_user.id)
update_user_id(
:topic_timers,
conditions: [
"x.status_type = y.status_type",
"x.topic_id = y.topic_id",
"y.deleted_at IS NULL",
],
)
update_user_id(:topic_users, conditions: "x.topic_id = y.topic_id")
update_user_id(:topic_views, conditions: "x.topic_id = y.topic_id")
Topic
.with_deleted
.where(deleted_by_id: @source_user.id)
.update_all(deleted_by_id: @target_user.id)
UnsubscribeKey.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
Upload.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
update_user_id(:user_archived_messages, conditions: "x.topic_id = y.topic_id")
update_user_id(
:user_actions,
user_id_column_name: "user_id",
conditions: [
"x.action_type = y.action_type",
"x.target_topic_id IS NOT DISTINCT FROM y.target_topic_id",
"x.target_post_id IS NOT DISTINCT FROM y.target_post_id",
"(x.acting_user_id IN (:source_user_id, :target_user_id) OR x.acting_user_id IS NOT DISTINCT FROM y.acting_user_id)",
],
)
update_user_id(
:user_actions,
user_id_column_name: "acting_user_id",
conditions: [
"x.action_type = y.action_type",
"x.user_id = y.user_id",
"x.target_topic_id IS NOT DISTINCT FROM y.target_topic_id",
"x.target_post_id IS NOT DISTINCT FROM y.target_post_id",
],
)
update_user_id(
:user_badges,
conditions: [
"x.badge_id = y.badge_id",
"x.seq = y.seq",
"x.post_id IS NOT DISTINCT FROM y.post_id",
],
)
UserBadge.where(granted_by_id: @source_user.id).update_all(granted_by_id: @target_user.id)
update_user_id(:user_custom_fields, conditions: "x.name = y.name")
# Source emails become secondary emails of the target (only for humans).
if @target_user.human?
update_user_id(
:user_emails,
conditions: "x.email = y.email OR y.primary = false",
updates: '"primary" = false',
)
end
UserExport.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
UserHistory.where(target_user_id: @source_user.id).update_all(target_user_id: @target_user.id)
UserHistory.where(acting_user_id: @source_user.id).update_all(acting_user_id: @target_user.id)
UserProfileView.where(user_profile_id: @source_user.id).update_all(
user_profile_id: @target_user.id,
)
UserProfileView.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
UserWarning.where(user_id: @source_user.id).update_all(user_id: @target_user.id)
UserWarning.where(created_by_id: @source_user.id).update_all(created_by_id: @target_user.id)
User.where(approved_by_id: @source_user.id).update_all(approved_by_id: @target_user.id)
end
# Demotes the source, scrambles its email to a unique .invalid address (so
# the target can keep the original as a secondary email), then destroys it.
def delete_source_user
if @acting_user
::MessageBus.publish "/merge_user",
{ message: I18n.t("admin.user.merge_user.deleting_source_user") },
user_ids: [@acting_user.id]
end
@source_user.reload
@source_user.skip_email_validation = true
@source_user.update(
admin: false,
email: "#{@source_user.username}_#{SecureRandom.hex}@no-email.invalid",
)
UserDestroyer.new(Discourse.system_user).destroy(@source_user, quiet: true)
end
# Records the merge in the staff action log using the pre-merge email.
def log_merge
logger = StaffActionLogger.new(@acting_user || Discourse.system_user)
logger.log_user_merge(@target_user, @source_user.username, @source_primary_email || "")
end
# Executes the NOT EXISTS-guarded repoint for one table.
def update_user_id(table_name, opts = {})
builder = update_user_id_sql_builder(table_name, opts)
builder.exec(source_user_id: @source_user.id, target_user_id: @target_user.id)
end
# Builds an UPDATE that repoints x.<user_id_column> from source to target,
# skipping any row for which a matching target row already exists (per the
# given join conditions) — this is how unique constraints are respected.
def update_user_id_sql_builder(table_name, opts = {})
user_id_column_name = opts[:user_id_column_name] || :user_id
conditions = Array.wrap(opts[:conditions])
updates = Array.wrap(opts[:updates])
builder = DB.build(<<~SQL)
UPDATE #{table_name} AS x
/*set*/
WHERE x.#{user_id_column_name} = :source_user_id AND NOT EXISTS(
SELECT 1
FROM #{table_name} AS y
/*where*/
)
SQL
builder.set("#{user_id_column_name} = :target_user_id")
updates.each { |u| builder.set(u) }
builder.where("y.#{user_id_column_name} = :target_user_id")
conditions.each { |c| builder.where(c) }
builder
end
end
``` | # frozen_string_literal: true
RSpec.describe UserMerger do
# Fixtures: source_user ("alice1") is merged into target_user ("alice").
fab!(:target_user) { Fabricate(:user, username: "alice", email: "[email protected]") }
fab!(:source_user) { Fabricate(:user, username: "alice1", email: "[email protected]") }
fab!(:walter) { Fabricate(:walter_white) }
fab!(:coding_horror)
fab!(:p1) { Fabricate(:post) }
fab!(:p2) { Fabricate(:post) }
fab!(:p3) { Fabricate(:post) }
fab!(:p4) { Fabricate(:post) }
fab!(:p5) { Fabricate(:post) }
fab!(:p6) { Fabricate(:post) }
before { Group.refresh_automatic_groups! }
# Helper: run the merge, defaulting to the fixture source/target pair.
def merge_users!(source = nil, target = nil)
source ||= source_user
target ||= target_user
UserMerger.new(source, target).merge!
end
it "changes owner of topics and posts" do
topic1 = Fabricate(:topic, user: source_user)
post1 = Fabricate(:post, topic: topic1, user: source_user)
post2 = Fabricate(:post, topic: topic1, user: walter)
post3 = Fabricate(:post, topic: topic1, user: target_user)
post4 = Fabricate(:post, topic: topic1, user: walter)
post5 = Fabricate(:post, topic: topic1, user: source_user)
topic2 = Fabricate(:topic, user: walter)
post6 = Fabricate(:post, topic: topic2, user: walter)
post7 = Fabricate(:post, topic: topic2, user: source_user)
post8 = Fabricate(:post, topic: topic2, user: source_user, deleted_at: Time.now)
merge_users!
# Source-owned content (including deleted post8) moves; others are untouched.
[topic1, post1, post3, post5, post7, post8].each do |x|
expect(x.reload.user).to eq(target_user)
end
[post2, post4, topic2, post6].each { |x| expect(x.reload.user).to eq(walter) }
end
it "changes owner of personal messages" do
pm_topic =
Fabricate(
:private_message_topic,
topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: target_user),
Fabricate.build(:topic_allowed_user, user: walter),
Fabricate.build(:topic_allowed_user, user: source_user),
],
)
post1 = Fabricate(:post, topic: pm_topic, user: source_user)
post2 = Fabricate(:post, topic: pm_topic, user: walter)
post3 = Fabricate(:post, topic: pm_topic, user: target_user)
post4 = Fabricate(:post, topic: pm_topic, user: source_user, deleted_at: Time.now)
small1 = pm_topic.add_small_action(source_user, "invited_user", "carol")
small2 = pm_topic.add_small_action(target_user, "invited_user", "david")
small3 = pm_topic.add_small_action(walter, "invited_user", "eve")
merge_users!
expect(post1.reload.user).to eq(target_user)
expect(post2.reload.user).to eq(walter)
expect(post3.reload.user).to eq(target_user)
expect(post4.reload.user).to eq(target_user)
expect(small1.reload.user).to eq(target_user)
expect(small2.reload.user).to eq(target_user)
expect(small3.reload.user).to eq(walter)
end
it "changes owner of categories" do
category = Fabricate(:category, user: source_user)
merge_users!
expect(category.reload.user).to eq(target_user)
end
# Overlapping category_users rows are unioned onto the target.
it "merges category notification settings" do
category1 = Fabricate(:category)
category2 = Fabricate(:category)
category3 = Fabricate(:category)
watching = CategoryUser.notification_levels[:watching]
CategoryUser.batch_set(source_user, :watching, [category1.id, category2.id])
CategoryUser.batch_set(target_user, :watching, [category2.id, category3.id])
merge_users!
category_ids =
CategoryUser.where(user_id: target_user.id, notification_level: watching).pluck(:category_id)
expect(category_ids).to contain_exactly(category1.id, category2.id, category3.id)
category_ids =
CategoryUser.where(user_id: source_user.id, notification_level: watching).pluck(:category_id)
expect(category_ids).to be_empty
end
context "with developer flag" do
it "moves the developer flag when the target user isn't a developer yet" do
Developer.create!(user_id: source_user.id)
merge_users!
expect(Developer.where(user_id: source_user.id).count).to eq(0)
expect(Developer.where(user_id: target_user.id).count).to eq(1)
end
it "deletes the source's developer flag when the target user is already a developer" do
Developer.create!(user_id: source_user.id)
Developer.create!(user_id: target_user.id)
merge_users!
expect(Developer.where(user_id: source_user.id).count).to eq(0)
expect(Developer.where(user_id: target_user.id).count).to eq(1)
end
end
context "with drafts" do
def create_draft(user, key, text)
seq = DraftSequence.next!(user, key)
Draft.set(user, key, seq, text)
end
def current_target_user_draft(key)
seq = DraftSequence.current(target_user, key)
Draft.get(target_user, key, seq)
end
# Source drafts move over unless the target already has one for the key.
it "merges drafts" do
key_topic_17 = "#{Draft::EXISTING_TOPIC}#{17}"
key_topic_19 = "#{Draft::EXISTING_TOPIC}#{19}"
create_draft(source_user, Draft::NEW_TOPIC, "new topic draft by alice1")
create_draft(source_user, key_topic_17, "draft by alice1")
create_draft(source_user, key_topic_19, "draft by alice1")
create_draft(target_user, key_topic_19, "draft by alice")
merge_users!
expect(current_target_user_draft(Draft::NEW_TOPIC)).to eq("new topic draft by alice1")
expect(current_target_user_draft(key_topic_17)).to eq("draft by alice1")
expect(current_target_user_draft(key_topic_19)).to eq("draft by alice")
expect(DraftSequence.where(user_id: source_user.id).count).to eq(0)
expect(Draft.where(user_id: source_user.id).count).to eq(0)
end
end
it "updates email logs" do
Fabricate(:email_log, user: source_user)
merge_users!
expect(EmailLog.where(user_id: source_user.id).count).to eq(0)
expect(EmailLog.where(user_id: target_user.id).count).to eq(1)
end
context "with likes" do
def given_daily_like_count_for(user, date)
GivenDailyLike.find_for(user.id, date).pluck(:likes_given)[0] || 0
end
it "merges likes" do
now = Time.zone.now
freeze_time(now - 1.day)
PostActionCreator.like(source_user, p1)
PostActionCreator.like(source_user, p2)
PostActionCreator.like(target_user, p2)
PostActionCreator.like(target_user, p3)
freeze_time(now)
PostActionCreator.like(source_user, p4)
PostActionCreator.like(source_user, p5)
PostActionCreator.like(target_user, p5)
PostActionCreator.like(source_user, p6)
PostActionDestroyer.destroy(source_user, p6, :like)
merge_users!
[p1, p2, p3, p4, p5].each { |p| expect(p.reload.like_count).to eq(1) }
expect(PostAction.with_deleted.where(user_id: source_user.id).count).to eq(0)
expect(PostAction.with_deleted.where(user_id: target_user.id).count).to eq(6)
expect(given_daily_like_count_for(source_user, Date.yesterday)).to eq(0)
expect(given_daily_like_count_for(target_user, Date.yesterday)).to eq(3)
expect(given_daily_like_count_for(source_user, Date.today)).to eq(0)
expect(given_daily_like_count_for(target_user, Date.today)).to eq(2)
end
end
it "updates group history" do
group = Fabricate(:group)
group.add_owner(source_user)
logger = GroupActionLogger.new(source_user, group)
logger.log_add_user_to_group(walter)
logger.log_add_user_to_group(target_user)
group = Fabricate(:group)
group.add_owner(target_user)
logger = GroupActionLogger.new(target_user, group)
logger.log_add_user_to_group(walter)
logger.log_add_user_to_group(source_user)
merge_users!
expect(GroupHistory.where(acting_user_id: source_user.id).count).to eq(0)
expect(GroupHistory.where(acting_user_id: target_user.id).count).to eq(4)
expect(GroupHistory.where(target_user_id: source_user.id).count).to eq(0)
expect(GroupHistory.where(target_user_id: target_user.id).count).to eq(2)
end
it "merges group memberships" do
group1 = Fabricate(:group)
group1.add_owner(target_user)
group1.bulk_add([walter.id, source_user.id])
group2 = Fabricate(:group)
group2.bulk_add([walter.id, target_user.id])
group3 = Fabricate(:group)
group3.add_owner(source_user)
group3.add(walter)
merge_users!
[group1, group2, group3].each do |g|
owner = [group1, group3].include?(g)
expect(GroupUser.where(group_id: g.id, user_id: target_user.id, owner: owner).count).to eq(1)
expect(Group.where(id: g.id).pick(:user_count)).to eq(2)
end
expect(GroupUser.where(user_id: source_user.id).count).to eq(0)
end
it "updates incoming emails" do
email = Fabricate(:incoming_email, user: source_user)
merge_users!
expect(email.reload.user).to eq(target_user)
end
it "updates incoming links" do
link1 = Fabricate(:incoming_link, user: source_user)
link2 = Fabricate(:incoming_link, current_user_id: source_user.id)
merge_users!
expect(link1.reload.user).to eq(target_user)
expect(link2.reload.current_user_id).to eq(target_user.id)
end
it "updates invites" do
invite1 = Fabricate(:invite, invited_by: walter)
Fabricate(:invited_user, invite: invite1, user: source_user)
invite2 = Fabricate(:invite, invited_by: source_user)
invite3 = Fabricate(:invite, invited_by: source_user)
invite3.trash!(source_user)
merge_users!
[invite1, invite2, invite3].each { |x| x.reload }
expect(invite1.invited_users.first.user).to eq(target_user)
expect(invite2.invited_by).to eq(target_user)
expect(invite3.invited_by).to eq(target_user)
expect(invite3.deleted_by).to eq(target_user)
end
it "merges muted users" do
muted1 = Fabricate(:user)
muted2 = Fabricate(:user)
muted3 = Fabricate(:user)
MutedUser.create!(user_id: source_user.id, muted_user_id: muted1.id)
MutedUser.create!(user_id: source_user.id, muted_user_id: muted2.id)
MutedUser.create!(user_id: target_user.id, muted_user_id: muted2.id)
MutedUser.create!(user_id: target_user.id, muted_user_id: muted3.id)
MutedUser.create!(user_id: walter.id, muted_user_id: source_user.id)
MutedUser.create!(user_id: coding_horror.id, muted_user_id: source_user.id)
MutedUser.create!(user_id: coding_horror.id, muted_user_id: target_user.id)
merge_users!
[muted1, muted2, muted3].each do |m|
expect(MutedUser.where(user_id: target_user.id, muted_user_id: m.id).count).to eq(1)
end
expect(MutedUser.where(user_id: source_user.id).count).to eq(0)
expect(MutedUser.where(user_id: walter.id, muted_user_id: target_user.id).count).to eq(1)
expect(MutedUser.where(user_id: coding_horror.id, muted_user_id: target_user.id).count).to eq(1)
expect(MutedUser.where(muted_user_id: source_user.id).count).to eq(0)
end
it "merges ignored users" do
ignored1 = Fabricate(:user)
ignored2 = Fabricate(:user)
ignored3 = Fabricate(:user)
Fabricate(:ignored_user, user: source_user, ignored_user: ignored1)
Fabricate(:ignored_user, user: source_user, ignored_user: ignored2)
Fabricate(:ignored_user, user: target_user, ignored_user: ignored2)
Fabricate(:ignored_user, user: target_user, ignored_user: ignored3)
Fabricate(:ignored_user, user: walter, ignored_user: source_user)
Fabricate(:ignored_user, user: coding_horror, ignored_user: source_user)
Fabricate(:ignored_user, user: coding_horror, ignored_user: target_user)
merge_users!
[ignored1, ignored2, ignored3].each do |m|
expect(IgnoredUser.where(user_id: target_user.id, ignored_user_id: m.id).count).to eq(1)
end
expect(IgnoredUser.where(user_id: source_user.id).count).to eq(0)
expect(IgnoredUser.where(user_id: walter.id, ignored_user_id: target_user.id).count).to eq(1)
expect(
IgnoredUser.where(user_id: coding_horror.id, ignored_user_id: target_user.id).count,
).to eq(1)
expect(IgnoredUser.where(ignored_user_id: source_user.id).count).to eq(0)
end
context "with notifications" do
it "updates notifications" do
Fabricate(:notification, user: source_user)
Fabricate(:notification, user: source_user)
Fabricate(:notification, user: walter)
merge_users!
expect(Notification.where(user_id: target_user.id).count).to eq(2)
expect(Notification.where(user_id: source_user.id).count).to eq(0)
end
end
context "with post actions" do
it "merges post actions" do
type_ids = PostActionType.public_type_ids + [PostActionType.flag_types.values.first]
type_ids.each do |type|
PostActionCreator.new(source_user, p1, type).perform
PostActionCreator.new(source_user, p2, type).perform
PostActionCreator.new(target_user, p2, type).perform
PostActionCreator.new(target_user, p3, type).perform
end
merge_users!
type_ids.each do |type|
expect(
PostAction.where(user_id: target_user.id, post_action_type_id: type).pluck(:post_id),
).to contain_exactly(p1.id, p2.id, p3.id)
end
expect(PostAction.where(user_id: source_user.id).count).to eq(0)
end
it "updates post actions" do
action1 = PostActionCreator.create(source_user, p1, :off_topic).post_action
action1.update_attribute(:deleted_by_id, source_user.id)
action2 = PostActionCreator.create(source_user, p2, :off_topic).post_action
action2.update_attribute(:deferred_by_id, source_user.id)
action3 = PostActionCreator.create(source_user, p3, :off_topic).post_action
action3.update_attribute(:agreed_by_id, source_user.id)
action4 = PostActionCreator.create(source_user, p4, :off_topic).post_action
action4.update_attribute(:disagreed_by_id, source_user.id)
merge_users!
expect(action1.reload.deleted_by_id).to eq(target_user.id)
expect(action2.reload.deferred_by_id).to eq(target_user.id)
expect(action3.reload.agreed_by_id).to eq(target_user.id)
expect(action4.reload.disagreed_by_id).to eq(target_user.id)
end
end
it "updates post revisions" do
post = p1
post_revision = Fabricate(:post_revision, post: post, user: source_user)
merge_users!
expect(post_revision.reload.user).to eq(target_user)
end
context "with post timings" do
def create_post_timing(post, user, msecs)
PostTiming.create!(
topic_id: post.topic_id,
post_number: post.post_number,
user_id: user.id,
msecs: msecs,
)
end
def post_timing_msecs_for(post, user)
PostTiming.where(
topic_id: post.topic_id,
post_number: post.post_number,
user_id: user.id,
).pluck(:msecs)[
0
] || 0
end
it "merges post timings" do
post1 = p1
post2 = p2
post3 = p3
post4 = p4
create_post_timing(post1, source_user, 12_345)
create_post_timing(post2, source_user, 9876)
create_post_timing(post4, source_user, 2**31 - 100)
create_post_timing(post2, target_user, 3333)
create_post_timing(post3, target_user, 10_000)
create_post_timing(post4, target_user, 5000)
merge_users!
expect(post_timing_msecs_for(post1, target_user)).to eq(12_345)
expect(post_timing_msecs_for(post2, target_user)).to eq(13_209)
expect(post_timing_msecs_for(post3, target_user)).to eq(10_000)
expect(post_timing_msecs_for(post4, target_user)).to eq(2**31 - 1)
expect(PostTiming.where(user_id: source_user.id).count).to eq(0)
end
end
context "with posts" do
it "updates user ids of posts" do
source_user.update_attribute(:moderator, true)
topic = Fabricate(:topic)
Fabricate(:post, topic: topic, user: source_user)
post2 = Fabricate(:basic_reply, topic: topic, user: walter)
post2.revise(source_user, raw: "#{post2.raw} foo")
PostLocker.new(post2, source_user).lock
post2.trash!(source_user)
merge_users!
post2.reload
expect(post2.deleted_by).to eq(target_user)
expect(post2.last_editor).to eq(target_user)
expect(post2.locked_by_id).to eq(target_user.id)
expect(post2.reply_to_user).to eq(target_user)
end
it "updates post action counts" do
posts = {}
PostActionType.types.each do |type_name, type_id|
posts[type_name] = post = Fabricate(:post, user: walter)
PostActionCreator.new(source_user, post, type_id).perform
PostActionCreator.new(target_user, post, type_id).perform
end
merge_users!
posts.each do |type, post|
post.reload
expect(post.public_send("#{type}_count")).to eq(1)
end
end
end
it "updates reviewables and reviewable history" do
reviewable = Fabricate(:reviewable_queued_post, created_by: source_user)
merge_users!
expect(reviewable.reload.created_by).to eq(target_user)
expect(reviewable.reviewable_histories.first.created_by).to eq(target_user)
end
describe "search logs" do
after { SearchLog.clear_debounce_cache! }
it "updates search log entries" do
SearchLog.log(
term: "hello",
search_type: :full_page,
ip_address: "192.168.0.1",
user_id: source_user.id,
)
SearchLog.log(
term: "world",
search_type: :full_page,
ip_address: "192.168.0.1",
user_id: source_user.id,
)
SearchLog.log(
term: "star trek",
search_type: :full_page,
ip_address: "192.168.0.2",
user_id: target_user.id,
)
SearchLog.log(
term: "bad",
search_type: :full_page,
ip_address: "192.168.0.3",
user_id: walter.id,
)
merge_users!
expect(SearchLog.where(user_id: target_user.id).count).to eq(3)
expect(SearchLog.where(user_id: source_user.id).count).to eq(0)
expect(SearchLog.where(user_id: walter.id).count).to eq(1)
end
end
it "merges tag notification settings" do
tag1 = Fabricate(:tag)
tag2 = Fabricate(:tag)
tag3 = Fabricate(:tag)
watching = TagUser.notification_levels[:watching]
TagUser.batch_set(source_user, :watching, [tag1.name, tag2.name])
TagUser.batch_set(target_user, :watching, [tag2.name, tag3.name])
merge_users!
tag_ids = TagUser.where(user_id: target_user.id, notification_level: watching).pluck(:tag_id)
expect(tag_ids).to contain_exactly(tag1.id, tag2.id, tag3.id)
tag_ids = TagUser.where(user_id: source_user.id, notification_level: watching).pluck(:tag_id)
expect(tag_ids).to be_empty
end
it "updates themes" do
theme = Fabricate(:theme, user: source_user)
merge_users!
expect(theme.reload.user_id).to eq(target_user.id)
end
it "merges allowed users for topics" do
pm_topic1 =
Fabricate(
:private_message_topic,
topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: target_user),
Fabricate.build(:topic_allowed_user, user: walter),
Fabricate.build(:topic_allowed_user, user: source_user),
],
)
pm_topic2 =
Fabricate(
:private_message_topic,
topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: walter),
Fabricate.build(:topic_allowed_user, user: source_user),
],
)
merge_users!
expect(pm_topic1.allowed_users).to contain_exactly(target_user, walter)
expect(pm_topic2.allowed_users).to contain_exactly(target_user, walter)
expect(TopicAllowedUser.where(user_id: source_user.id).count).to eq(0)
end
it "updates topic embeds" do
topic_embed = Fabricate(:topic_embed, embed_url: "http://example.com/post/248")
topic_embed.trash!(source_user)
merge_users!
expect(topic_embed.reload.deleted_by).to eq(target_user)
end
it "updates topic links" do
topic = Fabricate(:topic, user: source_user)
post = Fabricate(:post_with_external_links, user: source_user, topic: topic)
TopicLink.extract_from(post)
link = topic.topic_links.first
TopicLinkClick.create!(topic_link_id: link.id, user_id: source_user.id, ip_address: "127.0.0.1")
TopicLinkClick.create!(topic_link_id: link.id, user_id: target_user.id, ip_address: "127.0.0.1")
TopicLinkClick.create!(topic_link_id: link.id, user_id: walter.id, ip_address: "127.0.0.1")
merge_users!
expect(TopicLink.where(user_id: target_user.id).count).to be > 0
expect(TopicLink.where(user_id: source_user.id).count).to eq(0)
expect(TopicLinkClick.where(user_id: target_user.id).count).to eq(2)
expect(TopicLinkClick.where(user_id: source_user.id).count).to eq(0)
expect(TopicLinkClick.where(user_id: walter.id).count).to eq(1)
end
context "with topic timers" do
def create_topic_timer(topic, user, status_type, deleted_by = nil)
timer =
Fabricate(
:topic_timer,
topic: topic,
user: user,
status_type: TopicTimer.types[status_type],
)
timer.trash!(deleted_by) if deleted_by
timer.reload
end
it "merges topic timers" do
topic1 = Fabricate(:topic)
timer1 = create_topic_timer(topic1, source_user, :close, Discourse.system_user)
timer2 = create_topic_timer(topic1, source_user, :close)
timer3 = create_topic_timer(topic1, source_user, :reminder, source_user)
timer4 = create_topic_timer(topic1, target_user, :reminder, target_user)
timer5 = create_topic_timer(topic1, source_user, :reminder)
topic2 = Fabricate(:topic)
timer6 = create_topic_timer(topic2, target_user, :close)
timer7 = create_topic_timer(topic2, target_user, :reminder, Discourse.system_user)
create_topic_timer(topic2, source_user, :reminder, Discourse.system_user)
merge_users!
[timer1, timer2, timer3, timer4, timer5, timer6, timer7].each do |t|
expect(t.reload.user).to eq(target_user)
end
expect(TopicTimer.with_deleted.where(user_id: source_user.id).count).to eq(0)
expect(TopicTimer.with_deleted.where(deleted_by_id: target_user.id).count).to eq(2)
expect(TopicTimer.with_deleted.where(deleted_by_id: source_user.id).count).to eq(0)
end
end
it "merges topic notification settings" do
topic1 = Fabricate(:topic)
topic2 = Fabricate(:topic)
topic3 = Fabricate(:topic)
watching = TopicUser.notification_levels[:watching]
Fabricate(:topic_user, notification_level: watching, topic: topic1, user: source_user)
Fabricate(:topic_user, notification_level: watching, topic: topic2, user: source_user)
Fabricate(:topic_user, notification_level: watching, topic: topic2, user: target_user)
Fabricate(:topic_user, notification_level: watching, topic: topic3, user: target_user)
merge_users!
topic_ids =
TopicUser.where(user_id: target_user.id, notification_level: watching).pluck(:topic_id)
expect(topic_ids).to contain_exactly(topic1.id, topic2.id, topic3.id)
topic_ids =
TopicUser.where(user_id: source_user.id, notification_level: watching).pluck(:topic_id)
expect(topic_ids).to be_empty
end
it "merges topic views" do
topic1 = Fabricate(:topic)
topic2 = Fabricate(:topic)
topic3 = Fabricate(:topic)
ip = "127.0.0.1"
TopicViewItem.add(topic1.id, ip, source_user.id)
TopicViewItem.add(topic2.id, ip, source_user.id)
TopicViewItem.add(topic2.id, ip, target_user.id)
TopicViewItem.add(topic3.id, ip, target_user.id)
merge_users!
topic_ids = TopicViewItem.where(user_id: target_user.id).pluck(:topic_id)
expect(topic_ids).to contain_exactly(topic1.id, topic2.id, topic3.id)
expect(TopicViewItem.where(user_id: source_user.id).count).to eq(0)
end
it "updates topics" do
topic = Fabricate(:topic)
Fabricate(:post, user: walter, topic: topic)
Fabricate(:post, user: source_user, topic: topic)
topic.trash!(source_user)
merge_users!
topic.reload
expect(topic.deleted_by).to eq(target_user)
expect(topic.last_poster).to eq(target_user)
end
it "updates unsubscribe keys" do
UnsubscribeKey.create_key_for(source_user, UnsubscribeKey::DIGEST_TYPE)
UnsubscribeKey.create_key_for(target_user, UnsubscribeKey::DIGEST_TYPE)
UnsubscribeKey.create_key_for(walter, UnsubscribeKey::DIGEST_TYPE)
merge_users!
expect(UnsubscribeKey.where(user_id: target_user.id).count).to eq(2)
expect(UnsubscribeKey.where(user_id: source_user.id).count).to eq(0)
end
it "updates uploads" do
Fabricate(:upload, user: source_user)
Fabricate(:upload, user: target_user)
Fabricate(:upload, user: walter)
merge_users!
expect(Upload.where(user_id: target_user.id).count).to eq(2)
expect(Upload.where(user_id: source_user.id).count).to eq(0)
end
context "with user actions" do
# action_type and user_id are not nullable
# target_topic_id and acting_user_id are nullable, but always have a value
fab!(:post1) { p1 }
fab!(:post2) { p2 }
def log_like_action(acting_user, user, post)
UserAction.log_action!(
action_type: UserAction::LIKE,
user_id: user.id,
acting_user_id: acting_user.id,
target_topic_id: post.topic_id,
target_post_id: post.id,
)
end
def log_got_private_message(acting_user, user, topic)
UserAction.log_action!(
action_type: UserAction::GOT_PRIVATE_MESSAGE,
user_id: user.id,
acting_user_id: acting_user.id,
target_topic_id: topic.id,
target_post_id: -1,
)
end
it "merges when target_post_id is set" do
_a1 = log_like_action(source_user, walter, post1)
a2 = log_like_action(target_user, walter, post1)
a3 = log_like_action(source_user, walter, post2)
merge_users!
expect(UserAction.count).to eq(2)
action_ids =
UserAction.where(
action_type: UserAction::LIKE,
user_id: walter.id,
acting_user_id: target_user.id,
).pluck(:id)
expect(action_ids).to contain_exactly(a2.id, a3.id)
end
it "merges when acting_user is neither source_user nor target_user" do
pm_topic1 =
Fabricate(
:private_message_topic,
topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: walter),
Fabricate.build(:topic_allowed_user, user: source_user),
Fabricate.build(:topic_allowed_user, user: target_user),
Fabricate.build(:topic_allowed_user, user: coding_horror),
],
)
pm_topic2 =
Fabricate(
:private_message_topic,
topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: walter),
Fabricate.build(:topic_allowed_user, user: source_user),
],
)
pm_topic3 =
Fabricate(
:private_message_topic,
topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: walter),
Fabricate.build(:topic_allowed_user, user: target_user),
],
)
_a1 = log_got_private_message(walter, source_user, pm_topic1)
a2 = log_got_private_message(walter, target_user, pm_topic1)
_a3 = log_got_private_message(walter, coding_horror, pm_topic1)
a4 = log_got_private_message(walter, source_user, pm_topic2)
a5 = log_got_private_message(walter, target_user, pm_topic3)
merge_users!
expect(UserAction.count).to eq(4)
action_ids =
UserAction.where(
action_type: UserAction::GOT_PRIVATE_MESSAGE,
user_id: target_user.id,
acting_user_id: walter.id,
).pluck(:id)
expect(action_ids).to contain_exactly(a2.id, a4.id, a5.id)
end
end
it "merges archived messages" do
pm_topic1 =
Fabricate(
:private_message_topic,
topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: target_user),
Fabricate.build(:topic_allowed_user, user: walter),
Fabricate.build(:topic_allowed_user, user: source_user),
],
)
pm_topic2 =
Fabricate(
:private_message_topic,
topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: walter),
Fabricate.build(:topic_allowed_user, user: source_user),
],
)
UserArchivedMessage.archive!(source_user.id, pm_topic1)
UserArchivedMessage.archive!(target_user.id, pm_topic1)
UserArchivedMessage.archive!(source_user.id, pm_topic2)
UserArchivedMessage.archive!(walter.id, pm_topic2)
merge_users!
topic_ids = UserArchivedMessage.where(user_id: target_user.id).pluck(:topic_id)
expect(topic_ids).to contain_exactly(pm_topic1.id, pm_topic2.id)
expect(UserArchivedMessage.where(user_id: source_user.id).count).to eq(0)
end
context "with badges" do
def create_badge(badge, user, opts = {})
UserBadge.create!(
badge: badge,
user: user,
granted_by: opts[:granted_by] || Discourse.system_user,
granted_at: opts[:granted_at] || Time.now,
post: opts[:post],
seq: opts[:seq] || 0,
)
end
it "merges user badges" do
anniversary_badge = Badge.find(Badge::Anniversary)
create_badge(anniversary_badge, source_user, seq: 1)
b1 = create_badge(anniversary_badge, target_user, seq: 1)
b2 = create_badge(anniversary_badge, source_user, seq: 2)
great_post_badge = Badge.find(Badge::GreatPost)
b3 = create_badge(great_post_badge, target_user, post: Fabricate(:post, user: target_user))
b4 = create_badge(great_post_badge, source_user, post: Fabricate(:post, user: source_user))
autobiographer_badge = Badge.find(Badge::Autobiographer)
b5 = create_badge(autobiographer_badge, source_user)
merge_users!
user_badge_ids = UserBadge.where(user_id: target_user.id).pluck(:id)
expect(user_badge_ids).to contain_exactly(b1.id, b2.id, b3.id, b4.id, b5.id)
expect(UserBadge.where(user_id: source_user.id).count).to eq(0)
end
it "updates granted_by for user badges" do
badge = Badge.create!(name: "Hero", badge_type_id: BadgeType::Gold)
user_badge = create_badge(badge, walter, seq: 1, granted_by: source_user)
merge_users!
expect(user_badge.reload.granted_by).to eq(target_user)
end
end
it "merges user custom fields" do
UserCustomField.create!(user_id: source_user.id, name: "foo", value: "123")
UserCustomField.create!(user_id: source_user.id, name: "bar", value: "456")
UserCustomField.create!(user_id: source_user.id, name: "duplicate", value: "source")
UserCustomField.create!(user_id: target_user.id, name: "duplicate", value: "target")
UserCustomField.create!(user_id: target_user.id, name: "baz", value: "789")
merge_users!
fields = UserCustomField.where(user_id: target_user.id).pluck(:name, :value)
expect(fields).to contain_exactly(%w[foo 123], %w[bar 456], %w[duplicate target], %w[baz 789])
expect(UserCustomField.where(user_id: source_user.id).count).to eq(0)
end
it "merges email addresses" do
merge_users!
emails = UserEmail.where(user_id: target_user.id).pluck(:email, :primary)
expect(emails).to contain_exactly(["[email protected]", true], ["[email protected]", false])
expect(UserEmail.where(user_id: source_user.id).count).to eq(0)
end
it "skips merging email addresses when a secondary email address exists" do
merge_users!(source_user, target_user)
alice2 = Fabricate(:user, username: "alice2", email: "[email protected]")
merge_users!(alice2, target_user)
emails = UserEmail.where(user_id: target_user.id).pluck(:email, :primary)
expect(emails).to contain_exactly(["[email protected]", true], ["[email protected]", false])
expect(UserEmail.where(user_id: source_user.id).count).to eq(0)
end
it "skips merging email addresses when target user is not human" do
target_user = Discourse.system_user
merge_users!(source_user, target_user)
emails = UserEmail.where(user_id: target_user.id).pluck(:email, :primary)
expect(emails).to contain_exactly([target_user.email, true])
expect(UserEmail.exists?(user_id: source_user.id)).to eq(false)
end
it "updates exports" do
UserExport.create(file_name: "user-archive-alice1-190218-003249", user_id: source_user.id)
merge_users!
expect(UserExport.where(user_id: target_user.id).count).to eq(1)
expect(UserExport.where(user_id: source_user.id).count).to eq(0)
end
it "updates user history" do
UserHistory.create(
action: UserHistory.actions[:notified_about_get_a_room],
target_user_id: source_user.id,
)
UserHistory.create(
action: UserHistory.actions[:anonymize_user],
target_user_id: walter.id,
acting_user_id: source_user.id,
)
merge_users!
UserHistory.where(
action: UserHistory.actions[:merge_user],
target_user_id: target_user.id,
).delete_all
expect(UserHistory.where(target_user_id: target_user.id).count).to eq(1)
expect(UserHistory.where(target_user_id: source_user.id).count).to eq(0)
expect(UserHistory.where(acting_user_id: target_user.id).count).to eq(1)
expect(UserHistory.where(acting_user_id: source_user.id).count).to eq(0)
end
it "updates user profile views" do
ip = "127.0.0.1"
UserProfileView.add(source_user.id, ip, walter.id, Time.now, true)
UserProfileView.add(source_user.id, ip, target_user.id, Time.now, true)
UserProfileView.add(target_user.id, ip, source_user.id, Time.now, true)
UserProfileView.add(walter.id, ip, source_user.id, Time.now, true)
merge_users!
expect(UserProfileView.where(user_profile_id: target_user.id).count).to eq(3)
expect(UserProfileView.where(user_profile_id: walter.id).count).to eq(1)
expect(UserProfileView.where(user_profile_id: source_user.id).count).to eq(0)
expect(UserProfileView.where(user_id: target_user.id).count).to eq(3)
expect(UserProfileView.where(user_id: walter.id).count).to eq(1)
expect(UserProfileView.where(user_id: source_user.id).count).to eq(0)
end
it "merges user visits" do
freeze_time DateTime.parse("2010-01-01 12:00")
UserVisit.create!(
user_id: source_user.id,
visited_at: 2.days.ago,
posts_read: 22,
mobile: false,
time_read: 400,
)
UserVisit.create!(
user_id: source_user.id,
visited_at: Date.yesterday,
posts_read: 8,
mobile: false,
time_read: 100,
)
UserVisit.create!(
user_id: target_user.id,
visited_at: Date.yesterday,
posts_read: 12,
mobile: true,
time_read: 270,
)
UserVisit.create!(
user_id: target_user.id,
visited_at: Date.today,
posts_read: 10,
mobile: true,
time_read: 150,
)
merge_users!
expect(UserVisit.where(user_id: target_user.id).count).to eq(3)
expect(UserVisit.where(user_id: source_user.id).count).to eq(0)
expect(
UserVisit.where(
user_id: target_user.id,
visited_at: 2.days.ago,
posts_read: 22,
mobile: false,
time_read: 400,
).count,
).to eq(1)
expect(
UserVisit.where(
user_id: target_user.id,
visited_at: Date.yesterday,
posts_read: 20,
mobile: true,
time_read: 370,
).count,
).to eq(1)
expect(
UserVisit.where(
user_id: target_user.id,
visited_at: Date.today,
posts_read: 10,
mobile: true,
time_read: 150,
).count,
).to eq(1)
end
it "updates user warnings" do
UserWarning.create!(topic: Fabricate(:topic), user: source_user, created_by: walter)
UserWarning.create!(topic: Fabricate(:topic), user: target_user, created_by: walter)
UserWarning.create!(topic: Fabricate(:topic), user: walter, created_by: source_user)
merge_users!
expect(UserWarning.where(user_id: target_user.id).count).to eq(2)
expect(UserWarning.where(user_id: source_user.id).count).to eq(0)
expect(UserWarning.where(created_by_id: target_user.id).count).to eq(1)
expect(UserWarning.where(created_by_id: source_user.id).count).to eq(0)
end
it "triggers :merging_users event" do
events = DiscourseEvent.track_events { merge_users! }
expect(events).to include(event_name: :merging_users, params: [source_user, target_user])
end
context "with site settings" do
it "updates usernames in site settings" do
SiteSetting.site_contact_username = source_user.username
SiteSetting.embed_by_username = source_user.username
merge_users!
expect(SiteSetting.site_contact_username).to eq(target_user.username)
expect(SiteSetting.embed_by_username).to eq(target_user.username)
end
it "updates only the old username in site settings" do
SiteSetting.site_contact_username = source_user.username
SiteSetting.embed_by_username = walter.username
merge_users!
expect(SiteSetting.site_contact_username).to eq(target_user.username)
expect(SiteSetting.embed_by_username).to eq(walter.username)
end
end
it "updates users" do
walter.update!(approved_by: source_user)
upload = Fabricate(:upload)
source_user.update!(admin: true)
source_user.user_profile.update!(
card_background_upload: upload,
profile_background_upload: upload,
)
merge_users!
expect(walter.reload.approved_by).to eq(target_user)
target_user.reload
expect(target_user.admin).to eq(true)
expect(target_user.card_background_upload).to eq(upload)
expect(target_user.profile_background_upload).to eq(upload)
end
it "deletes the source user even when it's an admin" do
source_user.update_attribute(:admin, true)
expect(User.find_by_username(source_user.username)).to be_present
merge_users!
expect(User.find_by_username(source_user.username)).to be_nil
end
it "deletes the source user even when it is a member of a group that grants a trust level" do
group = Fabricate(:group, grant_trust_level: 3)
group.bulk_add([source_user.id, target_user.id])
merge_users!
expect(User.find_by_username(source_user.username)).to be_nil
end
it "works even when email domains are restricted" do
SiteSetting.allowed_email_domains = "example.com|work.com"
source_user.update_attribute(:admin, true)
expect(User.find_by_username(source_user.username)).to be_present
merge_users!
expect(User.find_by_username(source_user.username)).to be_nil
end
it "deletes external auth infos of source user" do
UserAssociatedAccount.create(
user_id: source_user.id,
provider_name: "facebook",
provider_uid: "1234",
)
SingleSignOnRecord.create(
user_id: source_user.id,
external_id: "example",
last_payload: "looks good",
)
merge_users!
expect(UserAssociatedAccount.where(user_id: source_user.id).count).to eq(0)
expect(SingleSignOnRecord.where(user_id: source_user.id).count).to eq(0)
end
it "deletes auth tokens" do
Fabricate(:api_key, user: source_user)
Fabricate(:readonly_user_api_key, user: source_user)
Fabricate(:user_second_factor_totp, user: source_user)
SiteSetting.verbose_auth_token_logging = true
UserAuthToken.generate!(user_id: source_user.id, user_agent: "Firefox", client_ip: "127.0.0.1")
merge_users!
expect(ApiKey.where(user_id: source_user.id).count).to eq(0)
expect(UserApiKey.where(user_id: source_user.id).count).to eq(0)
expect(UserSecondFactor.where(user_id: source_user.id).count).to eq(0)
expect(UserAuthToken.where(user_id: source_user.id).count).to eq(0)
expect(UserAuthTokenLog.where(user_id: source_user.id).count).to eq(0)
end
it "cleans up all remaining references to the source user" do
DirectoryItem.refresh!
Fabricate(:email_change_request, user: source_user)
Fabricate(:email_token, user: source_user)
Fabricate(:user_avatar, user: source_user)
merge_users!
expect(DirectoryItem.where(user_id: source_user.id).count).to eq(0)
expect(EmailChangeRequest.where(user_id: source_user.id).count).to eq(0)
expect(EmailToken.where(user_id: source_user.id).count).to eq(0)
expect(UserAvatar.where(user_id: source_user.id).count).to eq(0)
expect(User.find_by_username(source_user.username)).to be_nil
end
it "updates the username" do
Jobs::UpdateUsername
.any_instance
.expects(:execute)
.with(
{
user_id: source_user.id,
old_username: "alice1",
new_username: "alice",
avatar_template: target_user.avatar_template,
},
)
.once
merge_users!
end
it "correctly logs the merge" do
expect { merge_users! }.to change { UserHistory.count }.by(1)
log_entry = UserHistory.last
expect(log_entry.action).to eq(UserHistory.actions[:merge_user])
expect(log_entry.acting_user_id).to eq(Discourse::SYSTEM_USER_ID)
expect(log_entry.target_user_id).to eq(target_user.id)
expect(log_entry.context).to eq(
I18n.t("staff_action_logs.user_merged", username: source_user.username),
)
expect(log_entry.email).to eq("[email protected]")
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class PostAlerter
USER_BATCH_SIZE = 100
def self.post_created(post, opts = {})
PostAlerter.new(opts).after_save_post(post, true)
post
end
def self.post_edited(post, opts = {})
PostAlerter.new(opts).after_save_post(post, false)
post
end
def self.create_notification_alert(user:, post:, notification_type:, excerpt: nil, username: nil)
return if user.suspended?
if post_url = post.url
payload = {
notification_type: notification_type,
post_number: post.post_number,
topic_title: post.topic.title,
topic_id: post.topic.id,
excerpt:
excerpt ||
post.excerpt(
400,
text_entities: true,
strip_links: true,
remap_emoji: true,
plain_hashtags: true,
),
username: username || post.username,
post_url: post_url,
}
DiscourseEvent.trigger(:pre_notification_alert, user, payload)
if user.allow_live_notifications?
MessageBus.publish("/notification-alert/#{user.id}", payload, user_ids: [user.id])
end
push_notification(user, payload)
# deprecated. use push_notification instead
DiscourseEvent.trigger(:post_notification_alert, user, payload)
end
end
def self.push_notification(user, payload)
return if user.do_not_disturb?
DiscoursePluginRegistry.push_notification_filters.each do |filter|
return unless filter.call(user, payload)
end
if user.push_subscriptions.exists?
if user.seen_since?(SiteSetting.push_notification_time_window_mins.minutes.ago)
delay =
(SiteSetting.push_notification_time_window_mins - (Time.now - user.last_seen_at) / 60)
Jobs.enqueue_in(delay.minutes, :send_push_notification, user_id: user.id, payload: payload)
else
Jobs.enqueue(:send_push_notification, user_id: user.id, payload: payload)
end
end
if SiteSetting.allow_user_api_key_scopes.split("|").include?("push") &&
SiteSetting.allowed_user_api_push_urls.present?
clients =
user
.user_api_keys
.joins(:scopes)
.where("user_api_key_scopes.name IN ('push', 'notifications')")
.where("push_url IS NOT NULL AND push_url <> ''")
.where("position(push_url IN ?) > 0", SiteSetting.allowed_user_api_push_urls)
.where("revoked_at IS NULL")
.order(client_id: :asc)
.pluck(:client_id, :push_url)
if clients.length > 0
Jobs.enqueue(:push_notification, clients: clients, payload: payload, user_id: user.id)
end
end
DiscourseEvent.trigger(:push_notification, user, payload)
end
def initialize(default_opts = {})
@default_opts = default_opts
end
def not_allowed?(user, post)
user.blank? || user.bot? || user.id == post.user_id
end
def all_allowed_users(post)
@all_allowed_users ||= post.topic.all_allowed_users.reject { |u| not_allowed?(u, post) }
end
def allowed_users(post)
@allowed_users ||= post.topic.allowed_users.reject { |u| not_allowed?(u, post) }
end
def allowed_group_users(post)
@allowed_group_users ||= post.topic.allowed_group_users.reject { |u| not_allowed?(u, post) }
end
def directly_targeted_users(post)
allowed_users(post) - allowed_group_users(post)
end
def indirectly_targeted_users(post)
allowed_group_users(post)
end
def only_allowed_users(users, post)
return users unless post.topic.private_message?
users.select { |u| all_allowed_users(post).include?(u) }
end
def notify_about_reply?(post)
# small actions can be whispers in this case they will have an action code
# we never want to notify on this
post.post_type == Post.types[:regular] ||
(post.post_type == Post.types[:whisper] && post.action_code.nil?)
end
def after_save_post(post, new_record = false)
notified = [post.user, post.last_editor].uniq
DiscourseEvent.trigger(:post_alerter_before_mentions, post, new_record, notified)
# mentions (users/groups)
mentioned_groups, mentioned_users, mentioned_here = extract_mentions(post)
if mentioned_groups || mentioned_users || mentioned_here
mentioned_opts = {}
editor = post.last_editor
if post.last_editor_id != post.user_id
# Mention comes from an edit by someone else, so notification should say who added the mention.
mentioned_opts = {
user_id: editor.id,
original_username: editor.username,
display_username: editor.username,
}
end
if mentioned_users
mentioned_users = only_allowed_users(mentioned_users, post)
mentioned_users = mentioned_users - pm_watching_users(post)
notified += notify_users(mentioned_users - notified, :mentioned, post, mentioned_opts)
end
expand_group_mentions(mentioned_groups, post) do |group, users|
users = only_allowed_users(users, post)
to_notify =
DiscoursePluginRegistry.apply_modifier(
:expand_group_mention_users,
users - notified,
group,
)
notified +=
notify_users(to_notify, :group_mentioned, post, mentioned_opts.merge(group: group))
end
if mentioned_here
users = expand_here_mention(post, exclude_ids: notified.map(&:id))
users = only_allowed_users(users, post)
notified += notify_users(users - notified, :mentioned, post, mentioned_opts)
end
end
DiscourseEvent.trigger(:post_alerter_before_replies, post, new_record, notified)
# replies
reply_to_user = post.reply_notification_target
if new_record && notify_about_reply?(post)
if reply_to_user && !notified.include?(reply_to_user)
notified += notify_non_pm_users(reply_to_user, :replied, post)
end
topic_author = post.topic.user
if topic_author && !notified.include?(topic_author) &&
user_watching_topic?(topic_author, post.topic)
notified += notify_non_pm_users(topic_author, :replied, post)
end
end
DiscourseEvent.trigger(:post_alerter_before_quotes, post, new_record, notified)
# quotes
quoted_users = extract_quoted_users(post)
notified += notify_non_pm_users(quoted_users - notified, :quoted, post)
DiscourseEvent.trigger(:post_alerter_before_linked, post, new_record, notified)
# linked
linked_users = extract_linked_users(post)
notified += notify_non_pm_users(linked_users - notified, :linked, post)
DiscourseEvent.trigger(:post_alerter_before_post, post, new_record, notified)
notified = notified + category_or_tag_muters(post.topic)
if new_record
if post.topic.private_message?
# private messages
notified += notify_pm_users(post, reply_to_user, quoted_users, notified, new_record)
elsif notify_about_reply?(post)
# posts
notified +=
notify_post_users(
post,
notified,
new_record: new_record,
include_category_watchers: false,
include_tag_watchers: false,
)
notified +=
notify_post_users(
post,
notified,
new_record: new_record,
include_topic_watchers: false,
notification_type: :watching_category_or_tag,
)
end
end
sync_group_mentions(post, mentioned_groups)
DiscourseEvent.trigger(:post_alerter_before_first_post, post, new_record, notified)
if new_record && post.post_number == 1
topic = post.topic
if topic.present?
watchers = category_watchers(topic) + tag_watchers(topic) + group_watchers(topic)
# Notify only users who can see the topic
watchers &= topic.all_allowed_users.pluck(:id) if post.topic.private_message?
notified += notify_first_post_watchers(post, watchers, notified)
end
end
DiscourseEvent.trigger(:post_alerter_after_save_post, post, new_record, notified)
end
def group_watchers(topic)
GroupUser.where(
group_id: topic.allowed_groups.pluck(:group_id),
notification_level: GroupUser.notification_levels[:watching_first_post],
).pluck(:user_id)
end
def tag_watchers(topic)
topic
.tag_users
.notification_level_visible([TagUser.notification_levels[:watching_first_post]])
.distinct(:user_id)
.pluck(:user_id)
end
def category_watchers(topic)
topic
.category_users
.where(notification_level: CategoryUser.notification_levels[:watching_first_post])
.pluck(:user_id)
end
def category_or_tag_muters(topic)
user_option_condition_sql_fragment =
if SiteSetting.watched_precedence_over_muted
"uo.watched_precedence_over_muted IS false"
else
"(uo.watched_precedence_over_muted IS NULL OR uo.watched_precedence_over_muted IS false)"
end
user_ids_sql = <<~SQL
SELECT uo.user_id FROM user_options uo
LEFT JOIN topic_users tus ON tus.user_id = uo.user_id AND tus.topic_id = #{topic.id}
LEFT JOIN category_users cu ON cu.user_id = uo.user_id AND cu.category_id = #{topic.category_id.to_i}
LEFT JOIN tag_users tu ON tu.user_id = uo.user_id
JOIN topic_tags tt ON tt.tag_id = tu.tag_id AND tt.topic_id = #{topic.id}
WHERE
(tus.id IS NULL OR tus.notification_level != #{TopicUser.notification_levels[:watching]})
AND (cu.notification_level = #{CategoryUser.notification_levels[:muted]} OR tu.notification_level = #{TagUser.notification_levels[:muted]})
AND #{user_option_condition_sql_fragment}
SQL
User.where("id IN (#{user_ids_sql})")
end
def notify_first_post_watchers(post, user_ids, notified = nil)
return [] if user_ids.blank?
user_ids.uniq!
warn_if_not_sidekiq
# Don't notify the OP and last editor
user_ids -= [post.user_id, post.last_editor_id]
users = User.where(id: user_ids).includes(:do_not_disturb_timings)
users = users.where.not(id: notified.map(&:id)) if notified.present?
DiscourseEvent.trigger(:before_create_notifications_for_users, users, post)
each_user_in_batches(users) do |user|
create_notification(user, Notification.types[:watching_first_post], post)
end
users
end
def sync_group_mentions(post, mentioned_groups)
GroupMention.where(post_id: post.id).destroy_all
return if mentioned_groups.blank?
now = Time.zone.now
# insert_all instead of insert_all! since multiple post_alert jobs might be
# running concurrently
GroupMention.insert_all(
mentioned_groups.map do |group|
{ post_id: post.id, group_id: group.id, created_at: now, updated_at: now }
end,
)
end
def unread_posts(user, topic)
Post
.secured(Guardian.new(user))
.where(
"post_number > COALESCE((
SELECT last_read_post_number FROM topic_users tu
WHERE tu.user_id = ? AND tu.topic_id = ? ),0)",
user.id,
topic.id,
)
.where(
"reply_to_user_id = :user_id
OR exists(SELECT 1 from topic_users tu
WHERE tu.user_id = :user_id AND
tu.topic_id = :topic_id AND
notification_level = :topic_level)
OR exists(SELECT 1 from category_users cu
WHERE cu.user_id = :user_id AND
cu.category_id = :category_id AND
notification_level = :category_level)
OR exists(SELECT 1 from tag_users tu
WHERE tu.user_id = :user_id AND
tu.tag_id IN (SELECT tag_id FROM topic_tags WHERE topic_id = :topic_id) AND
notification_level = :tag_level)",
user_id: user.id,
topic_id: topic.id,
category_id: topic.category_id,
topic_level: TopicUser.notification_levels[:watching],
category_level: CategoryUser.notification_levels[:watching],
tag_level: TagUser.notification_levels[:watching],
)
.where(topic_id: topic.id)
end
def first_unread_post(user, topic)
unread_posts(user, topic).order("post_number").first
end
def unread_count(user, topic)
unread_posts(user, topic).count
end
def destroy_notifications(user, types, topic)
return if user.blank?
return unless Guardian.new(user).can_see?(topic)
User.transaction do
user.notifications.where(notification_type: types, topic_id: topic.id).destroy_all
# Reload so notification counts sync up correctly
user.reload
end
end
NOTIFIABLE_TYPES =
%i[
mentioned
replied
quoted
posted
linked
private_message
group_mentioned
watching_first_post
watching_category_or_tag
event_reminder
event_invitation
].map { |t| Notification.types[t] }
def group_stats(topic)
sql = <<~SQL
SELECT COUNT(*) FROM topics t
JOIN topic_allowed_groups g ON g.group_id = :group_id AND g.topic_id = t.id
LEFT JOIN group_archived_messages a ON a.topic_id = t.id AND a.group_id = g.group_id
WHERE a.id IS NULL AND t.deleted_at is NULL AND t.archetype = 'private_message'
SQL
topic.allowed_groups.map do |g|
{
group_id: g.id,
group_name: g.name,
inbox_count: DB.query_single(sql, group_id: g.id).first.to_i,
}
end
end
def notify_group_summary(user, topic, acting_user_id: nil)
@group_stats ||= {}
stats = (@group_stats[topic.id] ||= group_stats(topic))
return unless stats
group_id = topic.topic_allowed_groups.where(group_id: user.groups).pick(:group_id)
stat = stats.find { |s| s[:group_id] == group_id }
return unless stat
DistributedMutex.synchronize("group_message_notify_#{user.id}") do
if stat[:inbox_count] > 0
Notification.consolidate_or_create!(
notification_type: Notification.types[:group_message_summary],
user_id: user.id,
read: user.id === acting_user_id ? true : false,
data: {
group_id: stat[:group_id],
group_name: stat[:group_name],
inbox_count: stat[:inbox_count],
username: user.username_lower,
}.to_json,
)
else
Notification
.where(user_id: user.id, notification_type: Notification.types[:group_message_summary])
.where("data::json ->> 'group_id' = ?", stat[:group_id].to_s)
.delete_all
end
end
# TODO decide if it makes sense to also publish a desktop notification
end
def should_notify_edit?(notification, post, opts)
notification.created_at < 1.day.ago ||
notification.data_hash["display_username"] !=
(opts[:display_username].presence || post.user.username)
end
def should_notify_like?(user, notification)
if user.user_option.like_notification_frequency ==
UserOption.like_notification_frequency_type[:always]
return true
end
if user.user_option.like_notification_frequency ==
UserOption.like_notification_frequency_type[:first_time_and_daily] &&
notification.created_at < 1.day.ago
return true
end
false
end
def should_notify_previous?(user, post, notification, opts)
case notification.notification_type
when Notification.types[:edited]
should_notify_edit?(notification, post, opts)
when Notification.types[:liked]
should_notify_like?(user, notification)
else
false
end
end
COLLAPSED_NOTIFICATION_TYPES ||= [
Notification.types[:replied],
Notification.types[:posted],
Notification.types[:private_message],
Notification.types[:watching_category_or_tag],
]
def create_notification(user, type, post, opts = {})
opts = @default_opts.merge(opts)
DiscourseEvent.trigger(:before_create_notification, user, type, post, opts)
return if user.blank? || user.bot? || post.blank?
return if (topic = post.topic).blank?
is_liked = type == Notification.types[:liked]
if is_liked &&
user.user_option.like_notification_frequency ==
UserOption.like_notification_frequency_type[:never]
return
end
return if !Guardian.new(user).can_receive_post_notifications?(post)
return if user.staged? && topic.category&.mailinglist_mirror?
notifier_id = opts[:user_id] || post.user_id # xxxxx look at revision history
if notifier_id &&
UserCommScreener.new(
acting_user_id: notifier_id,
target_user_ids: user.id,
).ignoring_or_muting_actor?(user.id)
return
end
# skip if muted on the topic
if TopicUser.where(
topic: topic,
user: user,
notification_level: TopicUser.notification_levels[:muted],
).exists?
return
end
# skip if muted on the group
if group = opts[:group]
if GroupUser.where(
group_id: opts[:group_id],
user_id: user.id,
notification_level: TopicUser.notification_levels[:muted],
).exists?
return
end
end
existing_notifications =
user
.notifications
.order("notifications.id DESC")
.where(topic_id: post.topic_id, post_number: post.post_number)
.limit(10)
# Don't notify the same user about the same type of notification on the same post
existing_notification_of_same_type =
existing_notifications.find { |n| n.notification_type == type }
if existing_notification_of_same_type &&
!should_notify_previous?(user, post, existing_notification_of_same_type, opts)
return
end
# linked, quoted, mentioned, chat_quoted may be suppressed if you already have a reply notification
if [
Notification.types[:quoted],
Notification.types[:linked],
Notification.types[:mentioned],
Notification.types[:chat_quoted],
].include?(type)
if existing_notifications.find { |n| n.notification_type == Notification.types[:replied] }
return
end
end
collapsed = false
if COLLAPSED_NOTIFICATION_TYPES.include?(type)
destroy_notifications(user, COLLAPSED_NOTIFICATION_TYPES, topic)
collapsed = true
end
original_post = post
original_username = opts[:display_username].presence || post.username
if collapsed
post = first_unread_post(user, topic) || post
count = unread_count(user, topic)
if count > 1
I18n.with_locale(user.effective_locale) do
opts[:display_username] = I18n.t("embed.replies", count: count)
end
end
end
UserActionManager.notification_created(original_post, user, type, opts[:acting_user_id])
topic_title = topic.title
# when sending a private message email, keep the original title
if topic.private_message? && modifications = post.revisions.map(&:modifications)
if first_title_modification = modifications.find { |m| m.has_key?("title") }
topic_title = first_title_modification["title"][0]
end
end
notification_data = {
topic_title: topic_title,
original_post_id: original_post.id,
original_post_type: original_post.post_type,
original_username: original_username,
revision_number: opts[:revision_number],
display_username: opts[:display_username] || post.user.username,
}
opts[:custom_data].each { |k, v| notification_data[k] = v } if opts[:custom_data]&.is_a?(Hash)
if group = opts[:group]
notification_data[:group_id] = group.id
notification_data[:group_name] = group.name
end
if opts[:skip_send_email_to]&.include?(user.email)
skip_send_email = true
elsif original_post.via_email && (incoming_email = original_post.incoming_email)
skip_send_email =
incoming_email.to_addresses_split.include?(user.email) ||
incoming_email.cc_addresses_split.include?(user.email)
else
skip_send_email = opts[:skip_send_email]
end
# Create the notification
notification_data =
DiscoursePluginRegistry.apply_modifier(:notification_data, notification_data)
created =
user.notifications.consolidate_or_create!(
notification_type: type,
topic_id: post.topic_id,
post_number: post.post_number,
post_action_id: opts[:post_action_id],
data: notification_data.to_json,
skip_send_email: skip_send_email,
)
if created.id && existing_notifications.empty? && NOTIFIABLE_TYPES.include?(type)
create_notification_alert(
user: user,
post: original_post,
notification_type: type,
username: original_username,
)
end
created.id ? created : nil
end
def create_notification_alert(user:, post:, notification_type:, excerpt: nil, username: nil)
self.class.create_notification_alert(
user: user,
post: post,
notification_type: notification_type,
excerpt: excerpt,
username: username,
)
end
def push_notification(user, payload)
self.class.push_notification(user, payload)
end
def expand_group_mentions(groups, post)
return unless post.user && groups
Group
.mentionable(post.user, include_public: false)
.where(id: groups.map(&:id))
.each do |group|
next if group.user_count >= SiteSetting.max_users_notified_per_group_mention
yield group, group.users
end
end
def expand_here_mention(post, exclude_ids: nil)
posts = Post.where(topic_id: post.topic_id)
posts = posts.where.not(user_id: exclude_ids) if exclude_ids.present?
if post.user.staff?
posts = posts.where(post_type: [Post.types[:regular], Post.types[:whisper]])
else
posts = posts.where(post_type: Post.types[:regular])
end
User.real.where(id: posts.select(:user_id)).limit(SiteSetting.max_here_mentioned)
end
# TODO: Move to post-analyzer?
def extract_mentions(post)
mentions = post.raw_mentions
return if mentions.blank?
groups = Group.where("LOWER(name) IN (?)", mentions)
mentions -= groups.map(&:name).map(&:downcase)
groups = nil if groups.empty?
if mentions.present?
users =
User
.where(username_lower: mentions)
.includes(:do_not_disturb_timings)
.where.not(id: post.user_id)
users = nil if users.empty?
end
# @here can be a user mention and then this feature is disabled
here = mentions.include?(SiteSetting.here_mention) && Guardian.new(post.user).can_mention_here?
[groups, users, here]
end
# TODO: Move to post-analyzer?
# Returns a list of users who were quoted in the post
def extract_quoted_users(post)
usernames =
if SiteSetting.display_name_on_posts && !SiteSetting.prioritize_username_in_ux
post.raw.scan(/username:([[:alnum:]]*)"(?=\])/)
else
post.raw.scan(/\[quote=\"([^,]+),.+\"\]/)
end.uniq.map { |q| q.first.strip.downcase }
User.where.not(id: post.user_id).where(username_lower: usernames)
end
def extract_linked_users(post)
users =
post
.topic_links
.where(reflection: false)
.map do |link|
linked_post = link.link_post
if !linked_post && topic = link.link_topic
linked_post = topic.posts.find_by(post_number: 1)
end
(linked_post && post.user_id != linked_post.user_id && linked_post.user) || nil
end
.compact
DiscourseEvent.trigger(:after_extract_linked_users, users, post)
users
end
# Notify a bunch of users
def notify_non_pm_users(users, type, post, opts = {})
return [] if post.topic&.private_message?
notify_users(users, type, post, opts)
end
def notify_users(users, type, post, opts = {})
users = [users] unless users.is_a?(Array)
users.reject!(&:staged?) if post.topic&.private_message?
warn_if_not_sidekiq
DiscourseEvent.trigger(:before_create_notifications_for_users, users, post)
users.each { |u| create_notification(u, Notification.types[type], post, opts) }
users
end
def pm_watching_users(post)
return [] if !post.topic.private_message?
directly_targeted_users(post).filter do |u|
notification_level = TopicUser.get(post.topic, u)&.notification_level
notification_level == TopicUser.notification_levels[:watching]
end
end
def notify_pm_users(post, reply_to_user, quoted_users, notified, new_record = false)
return [] unless post.topic
warn_if_not_sidekiq
# To simplify things and to avoid IMAP double sync issues, and to cut down
# on emails sent via SMTP, any topic_allowed_users (except those who are
# not_allowed?) for a group that has SMTP enabled will have their notification
# email combined into one and sent via a single group SMTP email with CC addresses.
emails_to_skip_send = email_using_group_smtp_if_configured(post)
# We create notifications for all directly_targeted_users and email those
# who do _not_ have their email addresses in the emails_to_skip_send array
# (which will include all topic allowed users' email addresses if group SMTP
# is enabled).
users = directly_targeted_users(post).reject { |u| notified.include?(u) }
DiscourseEvent.trigger(:before_create_notifications_for_users, users, post)
users.each do |user|
if reply_to_user == user || pm_watching_users(post).include?(user) || user.staged?
create_notification(
user,
Notification.types[:private_message],
post,
skip_send_email_to: emails_to_skip_send,
)
end
end
# Users that are part of all mentioned groups. Emails sent by this notification
# flow will not be sent via group SMTP if it is enabled.
users = indirectly_targeted_users(post).reject { |u| notified.include?(u) }
DiscourseEvent.trigger(:before_create_notifications_for_users, users, post)
users.each do |user|
case TopicUser.get(post.topic, user)&.notification_level
when TopicUser.notification_levels[:watching]
create_pm_notification(user, post, emails_to_skip_send)
when TopicUser.notification_levels[:tracking]
# TopicUser is the canonical source of topic notification levels, except for
# new topics created within a group with default notification level set to
# `watching_first_post`. TopicUser notification level is set to `tracking`
# for these.
if is_replying?(user, reply_to_user, quoted_users) ||
(new_record && group_watched_first_post?(user, post))
create_pm_notification(user, post, emails_to_skip_send)
else
notify_group_summary(user, post.topic)
end
when TopicUser.notification_levels[:regular]
if is_replying?(user, reply_to_user, quoted_users)
create_pm_notification(user, post, emails_to_skip_send)
end
end
end
end
def group_notifying_via_smtp(post)
return if !SiteSetting.enable_smtp || post.post_type != Post.types[:regular]
return if post.topic.allowed_groups.none?
return post.topic.first_smtp_enabled_group if post.topic.allowed_groups.count == 1
topic_incoming_email = post.topic.incoming_email.first
return if topic_incoming_email.blank?
group = Group.find_by_email(topic_incoming_email.to_addresses)
return post.topic.first_smtp_enabled_group if !group&.smtp_enabled
group
end
def email_using_group_smtp_if_configured(post)
emails_to_skip_send = []
group = group_notifying_via_smtp(post)
return emails_to_skip_send if group.blank?
to_address = nil
cc_addresses = []
# We need to use topic_allowed_users here instead of directly_targeted_users
# because we want to make sure the to_address goes to the OP of the topic.
topic_allowed_users_by_age =
post
.topic
.topic_allowed_users
.includes(:user)
.order(:created_at)
.reject { |tau| not_allowed?(tau.user, post) }
return emails_to_skip_send if topic_allowed_users_by_age.empty?
# This should usually be the OP of the topic, unless they are the one
# replying by email (they are excluded by not_allowed? then)
to_address = topic_allowed_users_by_age.first.user.email
cc_addresses = topic_allowed_users_by_age[1..-1].map { |tau| tau.user.email }
email_addresses = [to_address, cc_addresses].flatten
# If any of these email addresses were cc address on the
# incoming email for the target post, do not send them emails (they
# already have been notified by the CC on the email)
if post.incoming_email.present?
cc_addresses = cc_addresses - post.incoming_email.cc_addresses_split
# If the to address is one of the recently added CC addresses, then we
# need to bail early, because otherwise we are sending a notification
# email to the user who was just added by CC. In this case the OP probably
# replied and CC'd some people, and they are the only other topic users.
return if post.incoming_email.cc_addresses_split.include?(to_address)
# We don't want to create an email storm if someone emails the group and
# CC's 50 support addresses from various places, which all then respond
# with auto-responders saying they have received our email. Any auto-generated
# emails should not propagate notifications to anyone else, not even
# the regular topic user notifications.
return email_addresses.dup.uniq if post.incoming_email.is_auto_generated?
end
# Send a single email using group SMTP settings to cut down on the
# number of emails sent via SMTP, also to replicate how support systems
# and group inboxes generally work in other systems.
#
# We need to send this on a delay to allow for editing and finalising
# posts, the same way we do for private_message user emails/notifications.
Jobs.enqueue_in(
SiteSetting.personal_email_time_window_seconds,
:group_smtp_email,
group_id: group.id,
post_id: post.id,
email: to_address,
cc_emails: cc_addresses,
)
# Add the group's email_username into the array, because it is used for
# skip_send_email_to in the case of user private message notifications
# (we do not want the group to be sent any emails from here because it
# will make another email for IMAP to pick up in the group's mailbox)
emails_to_skip_send = email_addresses.dup if email_addresses.any?
emails_to_skip_send << group.email_username
emails_to_skip_send.uniq
end
def notify_post_users(
post,
notified,
group_ids: nil,
include_topic_watchers: true,
include_category_watchers: true,
include_tag_watchers: true,
new_record: false,
notification_type: nil
)
return [] unless post.topic
warn_if_not_sidekiq
condition = +<<~SQL
users.id IN (
SELECT id FROM users WHERE false
/*topic*/
/*category*/
/*tags*/
)
SQL
condition.sub! "/*topic*/", <<~SQL if include_topic_watchers
UNION
SELECT user_id
FROM topic_users
WHERE notification_level = :watching
AND topic_id = :topic_id
SQL
condition.sub! "/*category*/", <<~SQL if include_category_watchers
UNION
SELECT cu.user_id
FROM category_users cu
LEFT JOIN topic_users tu ON tu.user_id = cu.user_id
AND tu.topic_id = :topic_id
WHERE cu.notification_level = :watching
AND cu.category_id = :category_id
AND (tu.user_id IS NULL OR tu.notification_level = :watching)
SQL
tag_ids = post.topic.topic_tags.pluck("topic_tags.tag_id")
condition.sub! "/*tags*/", <<~SQL if include_tag_watchers && tag_ids.present?
UNION
SELECT tag_users.user_id
FROM tag_users
LEFT JOIN topic_users tu ON tu.user_id = tag_users.user_id
AND tu.topic_id = :topic_id
LEFT JOIN tag_group_memberships tgm ON tag_users.tag_id = tgm.tag_id
LEFT JOIN tag_group_permissions tgp ON tgm.tag_group_id = tgp.tag_group_id
LEFT JOIN group_users gu ON gu.user_id = tag_users.user_id
WHERE (
tgp.group_id IS NULL OR
tgp.group_id = gu.group_id OR
tgp.group_id = :everyone_group_id OR
gu.group_id = :staff_group_id
)
AND (tag_users.notification_level = :watching
AND tag_users.tag_id IN (:tag_ids)
AND (tu.user_id IS NULL OR tu.notification_level = :watching))
SQL
notify =
User.where(
condition,
watching: TopicUser.notification_levels[:watching],
topic_id: post.topic_id,
category_id: post.topic.category_id,
tag_ids: tag_ids,
staff_group_id: Group::AUTO_GROUPS[:staff],
everyone_group_id: Group::AUTO_GROUPS[:everyone],
)
if group_ids.present?
notify = notify.joins(:group_users).where("group_users.group_id IN (?)", group_ids)
end
notify = notify.where(staged: false).staff if post.topic.private_message?
exclude_user_ids = notified.map(&:id)
notify = notify.where("users.id NOT IN (?)", exclude_user_ids) if exclude_user_ids.present?
DiscourseEvent.trigger(:before_create_notifications_for_users, notify, post)
already_seen_user_ids =
Set.new(
TopicUser
.where(topic_id: post.topic.id)
.where("last_read_post_number >= ?", post.post_number)
.pluck(:user_id),
)
each_user_in_batches(notify) do |user|
calculated_type =
if !new_record && already_seen_user_ids.include?(user.id)
Notification.types[:edited]
elsif notification_type
Notification.types[notification_type]
else
Notification.types[:posted]
end
opts = {}
opts[:display_username] = post.last_editor.username if calculated_type ==
Notification.types[:edited]
create_notification(user, calculated_type, post, opts)
end
notify
end
def warn_if_not_sidekiq
unless Sidekiq.server?
Rails.logger.warn(
"PostAlerter.#{caller_locations(1, 1)[0].label} was called outside of sidekiq",
)
end
end
private
def each_user_in_batches(users)
# This is race-condition-safe, unlike #find_in_batches
users
.pluck(:id)
.each_slice(USER_BATCH_SIZE) do |user_ids_batch|
User.where(id: user_ids_batch).includes(:do_not_disturb_timings).each { |user| yield(user) }
end
end
def create_pm_notification(user, post, emails_to_skip_send)
create_notification(
user,
Notification.types[:private_message],
post,
skip_send_email_to: emails_to_skip_send,
)
end
def is_replying?(user, reply_to_user, quoted_users)
reply_to_user == user || quoted_users.include?(user)
end
def user_watching_topic?(user, topic)
TopicUser.exists?(
user_id: user.id,
topic_id: topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
end
def group_watched_first_post?(user, post)
post.is_first_post? && group_watchers(post.topic).include?(user.id)
end
end
``` | # frozen_string_literal: true
RSpec::Matchers.define :add_notification do |user, notification_type|
match(notify_expectation_failures: true) do |actual|
notifications = user.notifications
before = notifications.count
actual.call
expect(notifications.count).to eq(before + 1),
"expected 1 new notification, got #{notifications.count - before}"
last_notification_type = notifications.last.notification_type
expect(last_notification_type).to eq(Notification.types[notification_type]),
"expected notification type to be '#{notification_type}', got '#{Notification.types.key(last_notification_type)}'"
end
match_when_negated do |actual|
expect { actual.call }.to_not change {
user.notifications.where(notification_type: Notification.types[notification_type]).count
}
end
supports_block_expectations
end
RSpec::Matchers.define_negated_matcher :not_add_notification, :add_notification
RSpec.describe PostAlerter do
fab!(:category)
fab!(:topic)
fab!(:post)
fab!(:private_message_topic)
fab!(:private_message_topic_post1) { Fabricate(:post, topic: private_message_topic) }
fab!(:private_message_topic_post2) { Fabricate(:post, topic: private_message_topic) }
fab!(:group)
fab!(:admin)
fab!(:evil_trout)
fab!(:coding_horror)
fab!(:walterwhite) { Fabricate(:walter_white) }
fab!(:user)
fab!(:tl2_user) { Fabricate(:user, trust_level: TrustLevel[2]) }
fab!(:private_category) do
Fabricate(
:private_category,
group: group,
email_in: "[email protected]",
email_in_allow_strangers: true,
)
end
def create_post_with_alerts(args = {})
post = Fabricate(:post, args)
PostAlerter.post_created(post)
end
# Registers two user API keys with a "notifications" scope and a push URL for
# the given user, simulating two devices subscribed to push notifications.
def setup_push_notification_subscription_for(user:)
  [0, 1].each do |device_index|
    UserApiKey.create!(
      user_id: user.id,
      client_id: "xxx#{device_index}",
      application_name: "iPhone#{device_index}",
      scopes: [UserApiKeyScope.new(name: "notifications")],
      push_url: "https://site2.com/push",
    )
  end
end
context "with private message" do
it "notifies for pms correctly" do
pm = Fabricate(:topic, archetype: "private_message", category_id: nil)
op = Fabricate(:post, user: pm.user)
pm.allowed_users << pm.user
PostAlerter.post_created(op)
reply = Fabricate(:post, user: pm.user, topic: pm, reply_to_post_number: 1)
PostAlerter.post_created(reply)
reply2 = Fabricate(:post, topic: pm, reply_to_post_number: 1)
PostAlerter.post_created(reply2)
# we get a green notification for a reply
expect(Notification.where(user_id: pm.user_id).pick(:notification_type)).to eq(
Notification.types[:private_message],
)
TopicUser.change(
pm.user_id,
pm.id,
notification_level: TopicUser.notification_levels[:tracking],
)
Notification.destroy_all
reply3 = Fabricate(:post, topic: pm)
PostAlerter.post_created(reply3)
# no notification cause we are tracking
expect(Notification.where(user_id: pm.user_id).count).to eq(0)
Notification.destroy_all
reply4 = Fabricate(:post, topic: pm, reply_to_post_number: 1)
PostAlerter.post_created(reply4)
# yes notification cause we were replied to
expect(Notification.where(user_id: pm.user_id).count).to eq(1)
end
it "notifies about private message even if direct mention" do
pm = Fabricate(:topic, archetype: "private_message", category_id: nil)
op =
Fabricate(:post, topic: pm, user: pm.user, raw: "Hello @#{user.username}, nice to meet you")
pm.allowed_users << pm.user
pm.allowed_users << user
TopicUser.create!(
user_id: user.id,
topic_id: pm.id,
notification_level: TopicUser.notification_levels[:watching],
)
PostAlerter.post_created(op)
expect(Notification.where(user_id: user.id).pick(:notification_type)).to eq(
Notification.types[:private_message],
)
end
context "with group inboxes" do
fab!(:user1) { Fabricate(:user) }
fab!(:user2) { Fabricate(:user) }
fab!(:group) do
Fabricate(:group, users: [user2], name: "TestGroup", default_notification_level: 2)
end
fab!(:watching_first_post_group) do
Fabricate(
:group,
name: "some_group",
users: [evil_trout, coding_horror],
messageable_level: Group::ALIAS_LEVELS[:everyone],
default_notification_level: NotificationLevels.all[:watching_first_post],
)
end
fab!(:pm) do
Fabricate(:topic, archetype: "private_message", category_id: nil, allowed_groups: [group])
end
fab!(:op) { Fabricate(:post, user: pm.user, topic: pm) }
it "triggers :before_create_notifications_for_users" do
pm.allowed_users << user1
events = DiscourseEvent.track_events { PostAlerter.post_created(op) }
expect(events).to include(
event_name: :before_create_notifications_for_users,
params: [[user1], op],
)
expect(events).to include(
event_name: :before_create_notifications_for_users,
params: [[user2], op],
)
end
it "triggers group summary notification" do
Jobs.run_immediately!
TopicUser.change(
user2.id,
pm.id,
notification_level: TopicUser.notification_levels[:tracking],
)
PostAlerter.post_created(op)
group_summary_notification = Notification.where(user_id: user2.id)
expect(group_summary_notification.count).to eq(1)
expect(group_summary_notification.first.notification_type).to eq(
Notification.types[:group_message_summary],
)
notification_payload = JSON.parse(group_summary_notification.first.data)
expect(notification_payload["group_name"]).to eq(group.name)
expect(notification_payload["inbox_count"]).to eq(1)
# archiving the only PM clears the group summary notification
GroupArchivedMessage.archive!(group.id, pm)
expect(Notification.where(user_id: user2.id)).to be_blank
# moving to inbox the only PM restores the group summary notification
GroupArchivedMessage.move_to_inbox!(group.id, pm)
group_summary_notification = Notification.where(user_id: user2.id)
expect(group_summary_notification.first.notification_type).to eq(
Notification.types[:group_message_summary],
)
updated_payload = JSON.parse(group_summary_notification.first.data)
expect(updated_payload["group_name"]).to eq(group.name)
expect(updated_payload["inbox_count"]).to eq(1)
# adding a second PM updates the count
pm2 =
Fabricate(:topic, archetype: "private_message", category_id: nil, allowed_groups: [group])
op2 = Fabricate(:post, user: pm2.user, topic: pm2)
TopicUser.change(
user2.id,
pm2.id,
notification_level: TopicUser.notification_levels[:tracking],
)
PostAlerter.post_created(op2)
group_summary_notification = Notification.where(user_id: user2.id)
updated_payload = JSON.parse(group_summary_notification.first.data)
expect(updated_payload["group_name"]).to eq(group.name)
expect(updated_payload["inbox_count"]).to eq(2)
# archiving the second PM quietly updates the group summary count for the acting user
GroupArchivedMessage.archive!(group.id, pm2, acting_user_id: user2.id)
group_summary_notification = Notification.where(user_id: user2.id)
expect(group_summary_notification.first.read).to eq(true)
updated_payload = JSON.parse(group_summary_notification.first.data)
expect(updated_payload["inbox_count"]).to eq(1)
# moving to inbox the second PM quietly updates the group summary count for the acting user
GroupArchivedMessage.move_to_inbox!(group.id, pm2, acting_user_id: user2.id)
group_summary_notification = Notification.where(user_id: user2.id)
expect(group_summary_notification.first.read).to eq(true)
updated_payload = JSON.parse(group_summary_notification.first.data)
expect(updated_payload["group_name"]).to eq(group.name)
expect(updated_payload["inbox_count"]).to eq(2)
end
it "updates the consolidated group summary inbox count and bumps the notification" do
user2.update!(last_seen_at: 5.minutes.ago)
TopicUser.change(
user2.id,
pm.id,
notification_level: TopicUser.notification_levels[:tracking],
)
PostAlerter.post_created(op)
starting_count =
Notification
.where(user_id: user2.id, notification_type: Notification.types[:group_message_summary])
.pluck("data::json ->> 'inbox_count'")
.last
.to_i
another_pm =
Fabricate(:topic, archetype: "private_message", category_id: nil, allowed_groups: [group])
another_post = Fabricate(:post, user: another_pm.user, topic: another_pm)
TopicUser.change(
user2.id,
another_pm.id,
notification_level: TopicUser.notification_levels[:tracking],
)
message_data =
MessageBus
.track_publish("/notification/#{user2.id}") { PostAlerter.post_created(another_post) }
.first
.data
expect(Notification.where(user: user2).count).to eq(1)
expect(message_data.dig(:last_notification, :notification, :data, :inbox_count)).to eq(
starting_count + 1,
)
expect(message_data[:unread_notifications]).to eq(1)
end
it "sends a PM notification when replying to a member tracking the topic" do
group.add(user1)
post = Fabricate(:post, topic: pm, user: user1)
TopicUser.change(
user1.id,
pm.id,
notification_level: TopicUser.notification_levels[:tracking],
)
expect {
create_post_with_alerts(
raw: "this is a reply to your post...",
topic: pm,
user: user2,
reply_to_post_number: post.post_number,
)
}.to change(
user1.notifications.where(notification_type: Notification.types[:private_message]),
:count,
).by(1)
end
it "notifies a group member if someone replies to their post" do
group.add(user1)
post = Fabricate(:post, topic: pm, user: user1)
TopicUser.change(
user1.id,
pm.id,
notification_level: TopicUser.notification_levels[:regular],
)
expect {
create_post_with_alerts(
raw: "this is a reply to your post...",
topic: pm,
user: user2,
reply_to_post_number: post.post_number,
)
}.to change(user1.notifications, :count).by(1)
end
it "notifies a group member if someone quotes their post" do
group.add(user1)
post = Fabricate(:post, topic: pm, user: user1)
TopicUser.change(
user1.id,
pm.id,
notification_level: TopicUser.notification_levels[:regular],
)
quote_raw = <<~MD
[quote="#{user1.username}, post:1, topic:#{pm.id}"]#{post.raw}[/quote]
MD
expect { create_post_with_alerts(raw: quote_raw, topic: pm, user: user2) }.to change(
user1.notifications,
:count,
).by(1)
end
it "Doesn't notify non-admin users when their post is quoted inside a whisper" do
group.add(admin)
TopicUser.change(
user2.id,
pm.id,
notification_level: TopicUser.notification_levels[:regular],
)
quote_raw = <<~MD
[quote="#{user2.username}, post:1, topic:#{pm.id}"]#{op.raw}[/quote]
MD
expect {
create_post_with_alerts(
raw: quote_raw,
topic: pm,
user: admin,
post_type: Post.types[:whisper],
)
}.not_to change(user2.notifications, :count)
end
context "with watching_first_post notification level" do
it "notifies group members of first post" do
post =
PostCreator.create!(
user,
title: "Hi there, welcome to my topic",
raw: "This is my awesome message",
archetype: Archetype.private_message,
target_group_names: watching_first_post_group.name,
)
PostAlerter.new.after_save_post(post, true)
expect(
evil_trout
.notifications
.where(notification_type: Notification.types[:private_message])
.count,
).to eq(1)
expect(
coding_horror
.notifications
.where(notification_type: Notification.types[:private_message])
.count,
).to eq(1)
end
it "doesn't notify group members of replies" do
post =
PostCreator.create!(
user,
title: "Hi there, welcome to my topic",
raw: "This is my awesome message",
archetype: Archetype.private_message,
target_group_names: watching_first_post_group.name,
)
expect(
evil_trout
.notifications
.where(notification_type: Notification.types[:private_message])
.count,
).to eq(0)
expect(
coding_horror
.notifications
.where(notification_type: Notification.types[:private_message])
.count,
).to eq(0)
PostAlerter.new.after_save_post(post, true)
expect(
evil_trout
.notifications
.where(notification_type: Notification.types[:private_message])
.count,
).to eq(1)
expect(
coding_horror
.notifications
.where(notification_type: Notification.types[:private_message])
.count,
).to eq(1)
reply =
Fabricate(
:post,
raw: "Reply to PM",
user: user,
topic: post.topic,
reply_to_post_number: post.post_number,
)
expect do PostAlerter.new.after_save_post(reply, false) end.to_not change {
Notification.count
}
end
end
end
end
context "with unread" do
it "does not return whispers as unread posts" do
_whisper =
Fabricate(
:post,
raw: "this is a whisper post",
user: admin,
topic: post.topic,
reply_to_post_number: post.post_number,
post_type: Post.types[:whisper],
)
expect(PostAlerter.new.first_unread_post(post.user, post.topic)).to be_blank
end
end
context "with edits" do
it "notifies correctly on edits" do
Jobs.run_immediately!
PostActionNotifier.enable
post = Fabricate(:post, raw: "I love waffles")
expect do post.revise(admin, raw: "I made a revision") end.to add_notification(
post.user,
:edited,
)
# lets also like this post which should trigger a notification
expect do
PostActionCreator.new(admin, post, PostActionType.types[:like]).perform
end.to add_notification(post.user, :liked)
# skip this notification cause we already notified on an edit by the same user
# in the previous edit
freeze_time 2.hours.from_now
expect do post.revise(admin, raw: "I made another revision") end.to_not change {
Notification.count
}
# this we do not skip cause 1 day has passed
freeze_time 23.hours.from_now
expect do post.revise(admin, raw: "I made another revision xyz") end.to add_notification(
post.user,
:edited,
)
expect do post.revise(Fabricate(:admin), raw: "I made a revision") end.to add_notification(
post.user,
:edited,
)
freeze_time 2.hours.from_now
expect do post.revise(admin, raw: "I made another revision") end.to add_notification(
post.user,
:edited,
)
end
it "notifies flaggers when flagged post gets unhidden by edit" do
post = create_post
PostActionNotifier.enable
Reviewable.set_priorities(high: 4.0)
SiteSetting.hide_post_sensitivity = Reviewable.sensitivities[:low]
PostActionCreator.spam(evil_trout, post)
PostActionCreator.spam(walterwhite, post)
post.reload
expect(post.hidden).to eq(true)
expect { post.revise(post.user, raw: post.raw + " ha I edited it ") }.to add_notification(
evil_trout,
:edited,
).and add_notification(walterwhite, :edited)
post.reload
expect(post.hidden).to eq(false)
notification = walterwhite.notifications.last
expect(notification.topic_id).to eq(post.topic.id)
expect(notification.post_number).to eq(post.post_number)
expect(notification.data_hash["display_username"]).to eq(post.user.username)
PostActionCreator.create(coding_horror, post, :spam)
PostActionCreator.create(walterwhite, post, :off_topic)
post.reload
expect(post.hidden).to eq(true)
expect {
post.revise(post.user, raw: post.raw + " ha I edited it again ")
}.to not_add_notification(evil_trout, :edited).and not_add_notification(
coding_horror,
:edited,
).and not_add_notification(walterwhite, :edited)
end
end
context "with quotes" do
fab!(:category)
fab!(:topic) { Fabricate(:topic, category: category) }
it "does not notify for muted users" do
post = Fabricate(:post, raw: '[quote="Eviltrout, post:1"]whatup[/quote]', topic: topic)
MutedUser.create!(user_id: evil_trout.id, muted_user_id: post.user_id)
expect { PostAlerter.post_created(post) }.not_to change(evil_trout.notifications, :count)
end
it "does not notify for ignored users" do
post = Fabricate(:post, raw: '[quote="EvilTrout, post:1"]whatup[/quote]', topic: topic)
Fabricate(:ignored_user, user: evil_trout, ignored_user: post.user)
expect { PostAlerter.post_created(post) }.not_to change(evil_trout.notifications, :count)
end
it "does not notify for users with new reply notification" do
post = Fabricate(:post, raw: '[quote="eviltRout, post:1"]whatup[/quote]', topic: topic)
notification =
Notification.create!(
topic: post.topic,
post_number: post.post_number,
read: false,
notification_type: Notification.types[:replied],
user: evil_trout,
data: { topic_title: "test topic" }.to_json,
)
expect { PostAlerter.post_edited(post) }.not_to change(evil_trout.notifications, :count)
notification.destroy
expect { PostAlerter.post_edited(post) }.to change(evil_trout.notifications, :count).by(1)
end
it "does not collapse quote notifications" do
expect {
2.times do
create_post_with_alerts(raw: '[quote="eviltrout, post:1"]whatup[/quote]', topic: topic)
end
}.to change(evil_trout.notifications, :count).by(2)
end
it "won't notify the user a second time on revision" do
p1 = create_post_with_alerts(raw: '[quote="Evil Trout, post:1"]whatup[/quote]')
expect {
p1.revise(p1.user, raw: '[quote="Evil Trout, post:1"]whatup now?[/quote]')
}.not_to change(evil_trout.notifications, :count)
end
it "doesn't notify the poster" do
topic = create_post_with_alerts.topic
expect {
Fabricate(
:post,
topic: topic,
user: topic.user,
raw: '[quote="Bruce Wayne, post:1"]whatup[/quote]',
)
}.not_to change(topic.user.notifications, :count)
end
it "triggers :before_create_notifications_for_users" do
post = Fabricate(:post, raw: '[quote="eviltrout, post:1"]whatup[/quote]')
events = DiscourseEvent.track_events { PostAlerter.post_created(post) }
expect(events).to include(
event_name: :before_create_notifications_for_users,
params: [[evil_trout], post],
)
end
context "with notifications when prioritizing full names" do
before do
SiteSetting.prioritize_username_in_ux = false
SiteSetting.display_name_on_posts = true
end
it "sends to correct user" do
quote = <<~MD
[quote="#{evil_trout.name}, post:1, username:#{evil_trout.username}"]whatup[/quote]
MD
expect { create_post_with_alerts(raw: quote, topic: topic) }.to change(
evil_trout.notifications,
:count,
).by(1)
end
it "sends to correct users when nested quotes with multiple users" do
quote = <<~MD
[quote="#{evil_trout.name}, post:1, username:#{evil_trout.username}"]this [quote="#{walterwhite.name}, post:2, username:#{walterwhite.username}"]whatup[/quote][/quote]
MD
expect { create_post_with_alerts(raw: quote, topic: topic) }.to change(
evil_trout.notifications,
:count,
).by(1).and change(walterwhite.notifications, :count).by(1)
end
it "sends to correct users when multiple quotes" do
user = Fabricate(:user)
quote = <<~MD
[quote="#{evil_trout.name}, post:1, username:#{evil_trout.username}"]"username:#{user.username}" [/quote]/n [quote="#{walterwhite.name}, post:2, username:#{walterwhite.username}"]whatup[/quote]
MD
expect { create_post_with_alerts(raw: quote, topic: topic) }.to change(
evil_trout.notifications,
:count,
).by(1).and change(walterwhite.notifications, :count).by(1).and not_change(
user.notifications,
:count,
)
end
it "sends to correct user when user has a full name that matches another user's username" do
user_with_matching_full_name = Fabricate(:user, name: evil_trout.username)
quote = <<~MD
[quote="#{user_with_matching_full_name.name}, post:1, username:#{user_with_matching_full_name.username}"]this [/quote]
MD
expect { create_post_with_alerts(raw: quote, topic: topic) }.to change(
user_with_matching_full_name.notifications,
:count,
).by(1).and not_change(evil_trout.notifications, :count)
end
end
end
context "with linked" do
let(:post1) { create_post }
let(:user) { post1.user }
let(:linking_post) { create_post(raw: "my magic topic\n##{Discourse.base_url}#{post1.url}") }
before { Jobs.run_immediately! }
it "will notify correctly on linking" do
linking_post
expect(user.notifications.count).to eq(1)
watcher = Fabricate(:user)
TopicUser.create!(
user_id: watcher.id,
topic_id: topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
create_post(
topic_id: topic.id,
user: user,
raw: "my magic topic\n##{Discourse.base_url}#{post1.url}",
)
user.reload
expect(user.notifications.where(notification_type: Notification.types[:linked]).count).to eq(
1,
)
expect(watcher.notifications.count).to eq(1)
# don't notify on reflection
post1.reload
expect(PostAlerter.new.extract_linked_users(post1).length).to eq(0)
end
it "triggers :before_create_notifications_for_users" do
events = DiscourseEvent.track_events { linking_post }
expect(events).to include(
event_name: :before_create_notifications_for_users,
params: [[user], linking_post],
)
end
it "doesn't notify the linked user if the user is staged and the category is restricted and allows strangers" do
staged_user = Fabricate(:staged)
group_member = Fabricate(:user)
group.add(group_member)
staged_user_post = create_post(user: staged_user, category: private_category)
create_post(
user: group_member,
category: private_category,
raw: "my magic topic\n##{Discourse.base_url}#{staged_user_post.url}",
)
staged_user.reload
expect(
staged_user.notifications.where(notification_type: Notification.types[:linked]).count,
).to eq(0)
end
end
context "with @here" do
let(:post) do
create_post_with_alerts(raw: "Hello @here how are you?", user: tl2_user, topic: topic)
end
fab!(:other_post) { Fabricate(:post, topic: topic) }
before { Jobs.run_immediately! }
it "does not notify unrelated users" do
expect { post }.not_to change(evil_trout.notifications, :count)
end
it "does not work if user here exists" do
Fabricate(:user, username: SiteSetting.here_mention)
expect { post }.not_to change(other_post.user.notifications, :count)
end
it "notifies users who replied" do
post2 = Fabricate(:post, topic: topic, post_type: Post.types[:whisper])
post3 = Fabricate(:post, topic: topic)
expect { post }.to change(other_post.user.notifications, :count).by(1).and not_change(
post2.user.notifications,
:count,
).and change(post3.user.notifications, :count).by(1)
end
it "notifies users who whispered" do
post2 = Fabricate(:post, topic: topic, post_type: Post.types[:whisper])
post3 = Fabricate(:post, topic: topic)
tl2_user.grant_admin!
expect { post }.to change(other_post.user.notifications, :count).by(1).and change(
post2.user.notifications,
:count,
).by(1).and change(post3.user.notifications, :count).by(1)
end
it "notifies only last max_here_mentioned users" do
SiteSetting.max_here_mentioned = 2
3.times { Fabricate(:post, topic: topic) }
expect { post }.to change { Notification.count }.by(2)
end
end
context "with @group mentions" do
fab!(:group) do
Fabricate(:group, name: "group", mentionable_level: Group::ALIAS_LEVELS[:everyone])
end
let(:post) { create_post_with_alerts(raw: "Hello @group how are you?") }
before { group.add(evil_trout) }
it "notifies users correctly" do
expect { post }.to change(evil_trout.notifications, :count).by(1)
expect(GroupMention.count).to eq(1)
Fabricate(:group, name: "group-alt", mentionable_level: Group::ALIAS_LEVELS[:everyone])
expect {
create_post_with_alerts(raw: "Hello, @group-alt should not trigger a notification?")
}.not_to change(evil_trout.notifications, :count)
expect(GroupMention.count).to eq(2)
group.update_columns(mentionable_level: Group::ALIAS_LEVELS[:members_mods_and_admins])
expect { create_post_with_alerts(raw: "Hello @group you are not mentionable") }.not_to change(
evil_trout.notifications,
:count,
)
expect(GroupMention.count).to eq(3)
group.update_columns(mentionable_level: Group::ALIAS_LEVELS[:owners_mods_and_admins])
group.add_owner(user)
expect {
create_post_with_alerts(raw: "Hello @group the owner can mention you", user: user)
}.to change(evil_trout.notifications, :count).by(1)
expect(GroupMention.count).to eq(4)
end
it "takes private mention as precedence" do
expect {
create_post_with_alerts(raw: "Hello @group and @eviltrout, nice to meet you")
}.to change(evil_trout.notifications, :count).by(1)
expect(evil_trout.notifications.last.notification_type).to eq(Notification.types[:mentioned])
end
it "triggers :before_create_notifications_for_users" do
events = DiscourseEvent.track_events { post }
expect(events).to include(
event_name: :before_create_notifications_for_users,
params: [[evil_trout], post],
)
end
end
context "with @mentions" do
let(:mention_post) { create_post_with_alerts(user: user, raw: "Hello @eviltrout") }
let(:topic) { mention_post.topic }
before { Jobs.run_immediately! }
it "notifies a user" do
expect { mention_post }.to change(evil_trout.notifications, :count).by(1)
end
it "won't notify the user a second time on revision" do
mention_post
expect {
mention_post.revise(
mention_post.user,
raw: "New raw content that still mentions @eviltrout",
)
}.not_to change(evil_trout.notifications, :count)
end
it "doesn't notify the user who created the topic in regular mode" do
topic.notify_regular!(user)
mention_post
expect {
create_post_with_alerts(user: user, raw: "second post", topic: topic)
}.not_to change(user.notifications, :count)
end
it "triggers :before_create_notifications_for_users" do
events = DiscourseEvent.track_events { mention_post }
expect(events).to include(
event_name: :before_create_notifications_for_users,
params: [[evil_trout], mention_post],
)
end
it "notification comes from editor if mention is added later" do
post = create_post_with_alerts(user: user, raw: "No mention here.")
expect { post.revise(admin, raw: "Mention @eviltrout in this edit.") }.to change(
evil_trout.notifications,
:count,
)
n = evil_trout.notifications.last
expect(n.data_hash["original_username"]).to eq(admin.username)
end
it "doesn't notify the last post editor if they mention themselves" do
post = create_post_with_alerts(user: user, raw: "Post without a mention.")
expect { post.revise(evil_trout, raw: "O hai, @eviltrout!") }.not_to change(
evil_trout.notifications,
:count,
)
end
fab!(:alice) { Fabricate(:user, username: "alice") }
fab!(:bob) { Fabricate(:user, username: "bob") }
fab!(:carol) { Fabricate(:admin, username: "carol") }
fab!(:dave) { Fabricate(:user, username: "dave") }
fab!(:eve) { Fabricate(:user, username: "eve") }
fab!(:group) do
Fabricate(:group, name: "group", mentionable_level: Group::ALIAS_LEVELS[:everyone])
end
before { group.bulk_add([alice.id, eve.id]) }
# Fabricates a post with the given attributes and immediately runs it
# through PostAlerter, as happens after a real post is created.
def create_post_with_alerts(args = {})
  PostAlerter.post_created(Fabricate(:post, args))
end
# Sets the user's notification level for a topic, e.g.
# set_topic_notification_level(alice, topic, :watching).
def set_topic_notification_level(user, topic, level_name)
  level = TopicUser.notification_levels[level_name]
  TopicUser.change(user.id, topic.id, notification_level: level)
end
context "with topic" do
fab!(:topic) { Fabricate(:topic, user: alice) }
%i[watching tracking regular].each do |notification_level|
context "when notification level is '#{notification_level}'" do
before { set_topic_notification_level(alice, topic, notification_level) }
it "notifies about @username mention" do
args = { user: bob, topic: topic, raw: "Hello @alice" }
expect { create_post_with_alerts(args) }.to add_notification(alice, :mentioned)
end
end
end
context "when notification level is 'muted'" do
before { set_topic_notification_level(alice, topic, :muted) }
it "does not notify about @username mention" do
args = { user: bob, topic: topic, raw: "Hello @alice" }
expect { create_post_with_alerts(args) }.to_not add_notification(alice, :mentioned)
end
end
end
context "with message to users" do
fab!(:pm_topic) do
Fabricate(
:private_message_topic,
user: alice,
topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: alice),
Fabricate.build(:topic_allowed_user, user: bob),
Fabricate.build(:topic_allowed_user, user: Discourse.system_user),
],
)
end
context "when user is part of conversation" do
%i[watching tracking regular].each do |notification_level|
context "when notification level is '#{notification_level}'" do
before { set_topic_notification_level(alice, pm_topic, notification_level) }
let(:expected_notification) do
notification_level == :watching ? :private_message : :mentioned
end
it "notifies about @username mention" do
args = { user: bob, topic: pm_topic, raw: "Hello @alice" }
expect { create_post_with_alerts(args) }.to add_notification(
alice,
expected_notification,
)
end
it "notifies about @username mentions by non-human users" do
args = { user: Discourse.system_user, topic: pm_topic, raw: "Hello @alice" }
expect { create_post_with_alerts(args) }.to add_notification(
alice,
expected_notification,
)
end
it "notifies about @group mention when allowed user is part of group" do
args = { user: bob, topic: pm_topic, raw: "Hello @group" }
expect { create_post_with_alerts(args) }.to add_notification(alice, :group_mentioned)
end
end
end
context "when notification level is 'muted'" do
before { set_topic_notification_level(alice, pm_topic, :muted) }
it "does not notify about @username mention" do
args = { user: bob, topic: pm_topic, raw: "Hello @alice" }
expect { create_post_with_alerts(args) }.to_not add_notification(alice, :mentioned)
end
end
end
context "when user is not part of conversation" do
it "does not notify about @username mention even though mentioned user is an admin" do
args = { user: bob, topic: pm_topic, raw: "Hello @carol" }
expect { create_post_with_alerts(args) }.to_not add_notification(carol, :mentioned)
end
it "does not notify about @username mention by non-human user even though mentioned user is an admin" do
args = { user: Discourse.system_user, topic: pm_topic, raw: "Hello @carol" }
expect { create_post_with_alerts(args) }.to_not add_notification(carol, :mentioned)
end
it "does not notify about @username mention when mentioned user is not allowed to see message" do
args = { user: bob, topic: pm_topic, raw: "Hello @dave" }
expect { create_post_with_alerts(args) }.to_not add_notification(dave, :mentioned)
end
it "does not notify about @group mention when user is not an allowed user" do
args = { user: bob, topic: pm_topic, raw: "Hello @group" }
expect { create_post_with_alerts(args) }.to_not add_notification(eve, :group_mentioned)
end
end
end
context "with message to group" do
fab!(:some_group) do
Fabricate(:group, name: "some_group", mentionable_level: Group::ALIAS_LEVELS[:everyone])
end
fab!(:pm_topic) do
Fabricate(
:private_message_topic,
user: alice,
topic_allowed_groups: [Fabricate.build(:topic_allowed_group, group: group)],
topic_allowed_users: [Fabricate.build(:topic_allowed_user, user: Discourse.system_user)],
)
end
before { some_group.bulk_add([alice.id, carol.id]) }
context "when group is part of conversation" do
%i[watching tracking regular].each do |notification_level|
context "when notification level is '#{notification_level}'" do
before { set_topic_notification_level(alice, pm_topic, notification_level) }
it "notifies about @group mention" do
args = { user: bob, topic: pm_topic, raw: "Hello @group" }
expect { create_post_with_alerts(args) }.to add_notification(alice, :group_mentioned)
end
it "notifies about @group mentions by non-human users" do
args = { user: Discourse.system_user, topic: pm_topic, raw: "Hello @group" }
expect { create_post_with_alerts(args) }.to add_notification(alice, :group_mentioned)
end
it "notifies about @username mention when user belongs to allowed group" do
args = { user: bob, topic: pm_topic, raw: "Hello @alice" }
expect { create_post_with_alerts(args) }.to add_notification(alice, :mentioned)
end
end
end
context "when notification level is 'muted'" do
before { set_topic_notification_level(alice, pm_topic, :muted) }
it "does not notify about @group mention" do
args = { user: bob, topic: pm_topic, raw: "Hello @group" }
expect { create_post_with_alerts(args) }.to_not add_notification(
alice,
:group_mentioned,
)
end
end
end
context "when group is not part of conversation" do
it "does not notify about @group mention even though mentioned user is an admin" do
args = { user: bob, topic: pm_topic, raw: "Hello @some_group" }
expect { create_post_with_alerts(args) }.to_not add_notification(carol, :group_mentioned)
end
it "does not notify about @group mention by non-human user even though mentioned user is an admin" do
args = { user: Discourse.system_user, topic: pm_topic, raw: "Hello @some_group" }
expect { create_post_with_alerts(args) }.to_not add_notification(carol, :group_mentioned)
end
it "does not notify about @username mention when user doesn't belong to allowed group" do
args = { user: bob, topic: pm_topic, raw: "Hello @dave" }
expect { create_post_with_alerts(args) }.to_not add_notification(dave, :mentioned)
end
end
end
end
describe ".create_notification" do
fab!(:topic) { Fabricate(:private_message_topic, user: user, created_at: 1.hour.ago) }
fab!(:post) { Fabricate(:post, topic: topic, created_at: 1.hour.ago) }
let(:type) { Notification.types[:private_message] }
it "creates a notification for PMs" do
post.revise(user, { raw: "This is the revised post" }, revised_at: Time.zone.now)
expect { PostAlerter.new.create_notification(user, type, post) }.to change {
user.notifications.count
}.by(1)
expect(user.notifications.last.data_hash["topic_title"]).to eq(topic.title)
end
it "keeps the original title for PMs" do
original_title = topic.title
post.revise(user, { title: "This is the revised title" }, revised_at: Time.now)
expect { PostAlerter.new.create_notification(user, type, post) }.to change {
user.notifications.count
}.by(1)
expect(user.notifications.last.data_hash["topic_title"]).to eq(original_title)
end
it "triggers :pre_notification_alert" do
events = DiscourseEvent.track_events { PostAlerter.new.create_notification(user, type, post) }
payload = {
notification_type: type,
post_number: post.post_number,
topic_title: post.topic.title,
topic_id: post.topic.id,
excerpt: post.excerpt(400, text_entities: true, strip_links: true, remap_emoji: true),
username: post.username,
post_url: post.url,
}
expect(events).to include(event_name: :pre_notification_alert, params: [user, payload])
end
it "does not alert when revising and changing notification type" do
PostAlerter.new.create_notification(user, type, post)
post.revise(
user,
{ raw: "Editing post to fake include a mention of @eviltrout" },
revised_at: Time.now,
)
events =
DiscourseEvent.track_events do
PostAlerter.new.create_notification(user, Notification.types[:mentioned], post)
end
payload = {
notification_type: type,
post_number: post.post_number,
topic_title: post.topic.title,
topic_id: post.topic.id,
excerpt: post.excerpt(400, text_entities: true, strip_links: true, remap_emoji: true),
username: post.username,
post_url: post.url,
}
expect(events).not_to include(event_name: :pre_notification_alert, params: [user, payload])
end
it "triggers :before_create_notification" do
type = Notification.types[:private_message]
events =
DiscourseEvent.track_events do
PostAlerter.new.create_notification(user, type, post, { revision_number: 1 })
end
expect(events).to include(
event_name: :before_create_notification,
params: [user, type, post, { revision_number: 1 }],
)
end
it "applies modifiers to notification_data" do
Plugin::Instance
.new
.register_modifier(:notification_data) do |notification_data|
notification_data[:silly_key] = "silly value"
notification_data
end
notification = PostAlerter.new.create_notification(user, type, post)
expect(notification.data_hash[:silly_key]).to eq("silly value")
DiscoursePluginRegistry.clear_modifiers!
end
end
describe ".push_notification" do
let(:mention_post) { create_post_with_alerts(user: user, raw: "Hello @eviltrout :heart:") }
let(:topic) { mention_post.topic }
before do
SiteSetting.allowed_user_api_push_urls = "https://site.com/push|https://site2.com/push"
setup_push_notification_subscription_for(user: evil_trout)
end
describe "DiscoursePluginRegistry#push_notification_filters" do
it "sends push notifications when all filters pass" do
Plugin::Instance.new.register_push_notification_filter { |user, payload| true }
expect { mention_post }.to change { Jobs::PushNotification.jobs.count }.by(1)
DiscoursePluginRegistry.reset!
end
it "does not send push notifications when a filters returns false" do
Plugin::Instance.new.register_push_notification_filter { |user, payload| false }
expect { mention_post }.not_to change { Jobs::PushNotification.jobs.count }
events = DiscourseEvent.track_events { mention_post }
expect(events.find { |event| event[:event_name] == :push_notification }).not_to be_present
DiscoursePluginRegistry.reset!
end
end
it "triggers the push notification event" do
events = DiscourseEvent.track_events { mention_post }
push_notification_event = events.find { |event| event[:event_name] == :push_notification }
expect(push_notification_event).to be_present
expect(push_notification_event[:params][0].username).to eq("eviltrout")
expect(push_notification_event[:params][1][:username]).to eq(user.username)
expect(push_notification_event[:params][1][:excerpt]).to eq("Hello @eviltrout ❤")
end
it "pushes nothing to suspended users" do
evil_trout.update_columns(suspended_till: 1.year.from_now)
expect { mention_post }.to_not change { Jobs::PushNotification.jobs.count }
events = DiscourseEvent.track_events { mention_post }
expect(events.find { |event| event[:event_name] == :push_notification }).not_to be_present
end
it "pushes nothing when the user is in 'do not disturb'" do
Fabricate(
:do_not_disturb_timing,
user: evil_trout,
starts_at: Time.zone.now,
ends_at: 1.day.from_now,
)
expect { mention_post }.to_not change { Jobs::PushNotification.jobs.count }
events = DiscourseEvent.track_events { mention_post }
expect(events.find { |event| event[:event_name] == :push_notification }).not_to be_present
end
it "correctly pushes notifications if configured correctly" do
Jobs.run_immediately!
body = nil
headers = nil
stub_request(:post, "https://site2.com/push").to_return do |request|
body = request.body
headers = request.headers
{ status: 200, body: "OK" }
end
set_subfolder "/subpath"
payload = {
"secret_key" => SiteSetting.push_api_secret_key,
"url" => Discourse.base_url,
"title" => SiteSetting.title,
"description" => SiteSetting.site_description,
"notifications" => [
{
"notification_type" => 1,
"post_number" => 1,
"topic_title" => topic.title,
"topic_id" => topic.id,
"excerpt" => "Hello @eviltrout ❤",
"username" => user.username,
"url" => UrlHelper.absolute(Discourse.base_path + mention_post.url),
"client_id" => "xxx0",
},
{
"notification_type" => 1,
"post_number" => 1,
"topic_title" => topic.title,
"topic_id" => topic.id,
"excerpt" => "Hello @eviltrout ❤",
"username" => user.username,
"url" => UrlHelper.absolute(Discourse.base_path + mention_post.url),
"client_id" => "xxx1",
},
],
}
post = mention_post
expect(JSON.parse(body)).to eq(payload)
expect(headers["Content-Type"]).to eq("application/json")
TopicUser.change(
evil_trout.id,
topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
post = Fabricate(:post, topic: post.topic, user_id: evil_trout.id)
user2 = Fabricate(:user)
# if we collapse a reply notification we should get notified on the correct post
new_post =
create_post_with_alerts(
topic: post.topic,
user_id: user.id,
reply_to_post_number: post.post_number,
raw: "this is my first reply",
)
changes = {
"notification_type" => Notification.types[:posted],
"post_number" => new_post.post_number,
"username" => new_post.user.username,
"excerpt" => new_post.raw,
"url" => UrlHelper.absolute(Discourse.base_path + new_post.url),
}
payload["notifications"][0].merge! changes
payload["notifications"][1].merge! changes
expect(JSON.parse(body)).to eq(payload)
new_post =
create_post_with_alerts(
topic: post.topic,
user_id: user2.id,
reply_to_post_number: post.post_number,
raw: "this is my second reply",
)
changes = {
"post_number" => new_post.post_number,
"username" => new_post.user.username,
"excerpt" => new_post.raw,
"url" => UrlHelper.absolute(Discourse.base_path + new_post.url),
}
payload["notifications"][0].merge! changes
payload["notifications"][1].merge! changes
expect(JSON.parse(body)).to eq(payload)
end
it "does not have invalid HTML in the excerpt" do
Fabricate(:category, slug: "random")
Jobs.run_immediately!
body = nil
stub_request(:post, "https://site2.com/push").to_return do |request|
body = request.body
{ status: 200, body: "OK" }
end
create_post_with_alerts(user: user, raw: "this, @eviltrout, is a test with #random")
expect(JSON.parse(body)["notifications"][0]["excerpt"]).to eq(
"this, @eviltrout, is a test with #random",
)
end
context "with push subscriptions" do
before do
Fabricate(:push_subscription, user: evil_trout)
SiteSetting.push_notification_time_window_mins = 10
end
it "delays sending push notification for active online user" do
evil_trout.update!(last_seen_at: 5.minutes.ago)
expect { mention_post }.to change { Jobs::SendPushNotification.jobs.count }
expect(Jobs::SendPushNotification.jobs[0]["at"]).not_to be_nil
end
it "delays sending push notification for active online user for the correct delay ammount" do
evil_trout.update!(last_seen_at: 5.minutes.ago)
# SiteSetting.push_notification_time_window_mins is 10
# last_seen_at is 5 minutes ago
# 10 minutes from now - 5 minutes ago = 5 minutes
delay = 5.minutes.from_now.to_f
expect { mention_post }.to change { Jobs::SendPushNotification.jobs.count }
expect(Jobs::SendPushNotification.jobs[0]["at"]).to be_within(30.second).of(delay)
end
it "does not delay push notification for inactive offline user" do
evil_trout.update!(last_seen_at: 40.minutes.ago)
expect { mention_post }.to change { Jobs::SendPushNotification.jobs.count }
expect(Jobs::SendPushNotification.jobs[0]["at"]).to be_nil
end
end
end
describe ".create_notification_alert" do
it "does nothing for suspended users" do
evil_trout.update_columns(suspended_till: 1.year.from_now)
events = nil
messages =
MessageBus.track_publish do
events =
DiscourseEvent.track_events do
PostAlerter.create_notification_alert(
user: evil_trout,
post: post,
notification_type: Notification.types[:custom],
excerpt: "excerpt",
username: "username",
)
end
end
expect(events.size).to eq(0)
expect(messages.size).to eq(0)
expect(Jobs::PushNotification.jobs.size).to eq(0)
end
it "does not publish to MessageBus /notification-alert if the user has not been seen for > 30 days, but still sends a push notification" do
evil_trout.update_columns(last_seen_at: 31.days.ago)
SiteSetting.allowed_user_api_push_urls = "https://site2.com/push"
UserApiKey.create!(
user_id: evil_trout.id,
client_id: "xxx#1",
application_name: "iPhone1",
scopes: ["notifications"].map { |name| UserApiKeyScope.new(name: name) },
push_url: "https://site2.com/push",
)
events = nil
messages =
MessageBus.track_publish do
events =
DiscourseEvent.track_events do
PostAlerter.create_notification_alert(
user: evil_trout,
post: post,
notification_type: Notification.types[:custom],
excerpt: "excerpt",
username: "username",
)
end
end
expect(events.map { |event| event[:event_name] }).to include(
:pre_notification_alert,
:push_notification,
:post_notification_alert,
)
expect(messages.size).to eq(0)
expect(Jobs::PushNotification.jobs.size).to eq(1)
end
end
describe "watching_first_post" do
fab!(:user)
fab!(:category)
fab!(:tag)
fab!(:topic) { Fabricate(:topic, category: category, tags: [tag]) }
fab!(:post) { Fabricate(:post, topic: topic) }
it "doesn't notify people who aren't watching" do
PostAlerter.post_created(post)
expect(
user.notifications.where(notification_type: Notification.types[:watching_first_post]).count,
).to eq(0)
end
it "notifies the user who is following the first post category" do
level = CategoryUser.notification_levels[:watching_first_post]
CategoryUser.set_notification_level_for_category(user, level, category.id)
PostAlerter.new.after_save_post(post, true)
expect(
user.notifications.where(notification_type: Notification.types[:watching_first_post]).count,
).to eq(1)
end
it "doesn't notify when the record is not new" do
level = CategoryUser.notification_levels[:watching_first_post]
CategoryUser.set_notification_level_for_category(user, level, category.id)
PostAlerter.new.after_save_post(post, false)
expect(
user.notifications.where(notification_type: Notification.types[:watching_first_post]).count,
).to eq(0)
end
it "notifies the user who is following the first post tag" do
level = TagUser.notification_levels[:watching_first_post]
TagUser.change(user.id, tag.id, level)
PostAlerter.post_created(post)
expect(
user.notifications.where(notification_type: Notification.types[:watching_first_post]).count,
).to eq(1)
end
it "notifies the user who is following the first post group" do
GroupUser.create(group_id: group.id, user_id: user.id)
GroupUser.create(group_id: group.id, user_id: post.user.id)
topic.topic_allowed_groups.create(group_id: group.id)
level = GroupUser.notification_levels[:watching_first_post]
GroupUser.where(user_id: user.id, group_id: group.id).update_all(notification_level: level)
PostAlerter.post_created(post)
expect(
user.notifications.where(notification_type: Notification.types[:watching_first_post]).count,
).to eq(1)
end
it "triggers :before_create_notifications_for_users" do
level = CategoryUser.notification_levels[:watching_first_post]
CategoryUser.set_notification_level_for_category(user, level, category.id)
events = DiscourseEvent.track_events { PostAlerter.new.after_save_post(post, true) }
expect(events).to include(
event_name: :before_create_notifications_for_users,
params: [[user], post],
)
end
it "sends a push notification when user has a push subscription" do
setup_push_notification_subscription_for(user: user)
level = CategoryUser.notification_levels[:watching_first_post]
CategoryUser.set_notification_level_for_category(user, level, category.id)
events =
DiscourseEvent.track_events(:push_notification) do
PostAlerter.new.after_save_post(post, true)
end
expect(
events.detect do |e|
e[:params][0] == user &&
e[:params][1][:notification_type] == Notification.types[:watching_first_post]
end,
).to be_present
end
end
context "with replies" do
it "triggers :before_create_notifications_for_users" do
_post = Fabricate(:post, user: user, topic: topic)
reply = Fabricate(:post, topic: topic, reply_to_post_number: 1)
events = DiscourseEvent.track_events { PostAlerter.post_created(reply) }
expect(events).to include(
event_name: :before_create_notifications_for_users,
params: [[user], reply],
)
end
it "notifies about regular reply" do
_post = Fabricate(:post, user: user, topic: topic)
reply = Fabricate(:post, topic: topic, reply_to_post_number: 1)
PostAlerter.post_created(reply)
expect(user.notifications.where(notification_type: Notification.types[:replied]).count).to eq(
1,
)
end
it "does not notify admins when suppress_secured_categories_from_admin is enabled" do
SiteSetting.suppress_secured_categories_from_admin = true
topic = Fabricate(:topic, category: private_category)
post = Fabricate(:post, raw: "hello @#{admin.username} how are you today?", topic: topic)
PostAlerter.post_created(post)
expect(admin.notifications.count).to eq(0)
end
it "doesn't notify regular user about whispered reply" do
_post = Fabricate(:post, user: user, topic: topic)
whispered_reply =
Fabricate(
:post,
user: admin,
topic: topic,
post_type: Post.types[:whisper],
reply_to_post_number: 1,
)
PostAlerter.post_created(whispered_reply)
expect(user.notifications.where(notification_type: Notification.types[:replied]).count).to eq(
0,
)
end
it "notifies staff user about whispered reply" do
admin1 = Fabricate(:admin)
admin2 = Fabricate(:admin)
_post = Fabricate(:post, user: user, topic: topic)
whispered_reply1 =
Fabricate(
:post,
user: admin1,
topic: topic,
post_type: Post.types[:whisper],
reply_to_post_number: 1,
)
whispered_reply2 =
Fabricate(
:post,
user: admin2,
topic: topic,
post_type: Post.types[:whisper],
reply_to_post_number: 2,
)
PostAlerter.post_created(whispered_reply1)
PostAlerter.post_created(whispered_reply2)
expect(
admin1.notifications.where(notification_type: Notification.types[:replied]).count,
).to eq(1)
TopicUser.change(
admin1.id,
topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
# this should change nothing cause the moderator post has an action code
# if we have an action code then we should never have notifications, this is rare but
# assign whispers are like this
whispered_reply3 =
topic.add_moderator_post(
admin2,
"i am a reply",
post_type: Post.types[:whisper],
action_code: "moderator_thing",
)
PostAlerter.post_created(whispered_reply3)
# if this whisper is not ignored like it should we would see a posted notification and no replied notifications
notifications = admin1.notifications.where(topic_id: topic.id).to_a
expect(notifications.first.notification_type).to eq(Notification.types[:replied])
expect(notifications.length).to eq(1)
expect(notifications.first.post_number).to eq(whispered_reply2.post_number)
end
it "sends email notifications only to users not on CC list of incoming email" do
alice = Fabricate(:user, username: "alice", email: "[email protected]")
bob = Fabricate(:user, username: "bob", email: "[email protected]")
carol = Fabricate(:user, username: "carol", email: "[email protected]", staged: true)
dave = Fabricate(:user, username: "dave", email: "[email protected]", staged: true)
erin = Fabricate(:user, username: "erin", email: "[email protected]")
topic =
Fabricate(
:private_message_topic,
topic_allowed_users: [
Fabricate.build(:topic_allowed_user, user: alice),
Fabricate.build(:topic_allowed_user, user: bob),
Fabricate.build(:topic_allowed_user, user: carol),
Fabricate.build(:topic_allowed_user, user: dave),
Fabricate.build(:topic_allowed_user, user: erin),
],
)
_post = Fabricate(:post, user: alice, topic: topic)
TopicUser.change(
alice.id,
topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
TopicUser.change(
bob.id,
topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
TopicUser.change(
erin.id,
topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
email =
Fabricate(
:incoming_email,
raw: <<~RAW,
Return-Path: <[email protected]>
From: Bob <[email protected]>
To: [email protected], [email protected]
CC: [email protected], [email protected]
Subject: Hello world
Date: Fri, 15 Jan 2016 00:12:43 +0100
Message-ID: <[email protected]>
Mime-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: quoted-printable
This post was created by email.
RAW
from_address: "[email protected]",
to_addresses: "[email protected];[email protected]",
cc_addresses: "[email protected];[email protected]",
)
reply =
Fabricate(
:post_via_email,
user: bob,
topic: topic,
incoming_email: email,
reply_to_post_number: 1,
)
NotificationEmailer.expects(:process_notification).with { |n| n.user_id == alice.id }.once
NotificationEmailer.expects(:process_notification).with { |n| n.user_id == bob.id }.never
NotificationEmailer.expects(:process_notification).with { |n| n.user_id == carol.id }.never
NotificationEmailer.expects(:process_notification).with { |n| n.user_id == dave.id }.never
NotificationEmailer.expects(:process_notification).with { |n| n.user_id == erin.id }.never
PostAlerter.post_created(reply)
expect(alice.notifications.count).to eq(1)
expect(bob.notifications.count).to eq(0)
expect(carol.notifications.count).to eq(1)
expect(dave.notifications.count).to eq(1)
expect(erin.notifications.count).to eq(1)
end
it "does not send email notifications to staged users when notification originates in mailinglist mirror category" do
category = Fabricate(:mailinglist_mirror_category)
topic = Fabricate(:topic, category: category)
user = Fabricate(:staged)
_post = Fabricate(:post, user: user, topic: topic)
reply = Fabricate(:post, topic: topic, reply_to_post_number: 1)
NotificationEmailer.expects(:process_notification).never
expect { PostAlerter.post_created(reply) }.not_to change(user.notifications, :count)
category.mailinglist_mirror = false
NotificationEmailer.expects(:process_notification).once
expect { PostAlerter.post_created(reply) }.to change(user.notifications, :count).by(1)
end
it "creates a notification of type `replied` instead of `posted` for the topic author if they're watching the topic" do
Jobs.run_immediately!
u1 = Fabricate(:admin)
u2 = Fabricate(:admin)
topic = create_topic(user: u1)
u1.notifications.destroy_all
expect do create_post(topic: topic, user: u2) end.to change {
u1.reload.notifications.count
}.by(1)
expect(
u1.notifications.exists?(
topic_id: topic.id,
notification_type: Notification.types[:replied],
post_number: 1,
read: false,
),
).to eq(true)
end
it "it doesn't notify about small action posts when the topic author is watching the topic " do
Jobs.run_immediately!
u1 = Fabricate(:admin)
u2 = Fabricate(:admin)
topic = create_topic(user: u1)
u1.notifications.destroy_all
expect do topic.update_status("closed", true, u2, message: "hello world") end.not_to change {
u1.reload.notifications.count
}
end
end
context "with category" do
context "with watching" do
it "triggers :before_create_notifications_for_users" do
topic = Fabricate(:topic, category: category)
post = Fabricate(:post, topic: topic)
level = CategoryUser.notification_levels[:watching]
CategoryUser.set_notification_level_for_category(user, level, category.id)
events = DiscourseEvent.track_events { PostAlerter.post_created(post) }
expect(events).to include(
event_name: :before_create_notifications_for_users,
params: [[user], post],
)
end
it "notifies staff about whispered post" do
topic = Fabricate(:topic, category: category)
level = CategoryUser.notification_levels[:watching]
CategoryUser.set_notification_level_for_category(admin, level, category.id)
CategoryUser.set_notification_level_for_category(user, level, category.id)
whispered_post =
Fabricate(:post, user: Fabricate(:admin), topic: topic, post_type: Post.types[:whisper])
expect { PostAlerter.post_created(whispered_post) }.to add_notification(
admin,
:watching_category_or_tag,
)
expect { PostAlerter.post_created(whispered_post) }.not_to add_notification(
user,
:watching_category_or_tag,
)
end
it "notifies a staged user about a private post, but only if the user has access" do
staged_member = Fabricate(:staged)
staged_non_member = Fabricate(:staged)
group_member = Fabricate(:user)
group.add(group_member)
group.add(staged_member)
level = CategoryUser.notification_levels[:watching]
CategoryUser.set_notification_level_for_category(group_member, level, private_category.id)
CategoryUser.set_notification_level_for_category(staged_member, level, private_category.id)
CategoryUser.set_notification_level_for_category(
staged_non_member,
level,
private_category.id,
)
topic = Fabricate(:topic, category: private_category, user: group_member)
post = Fabricate(:post, topic: topic)
expect { PostAlerter.post_created(post) }.to add_notification(
staged_member,
:watching_category_or_tag,
).and not_add_notification(staged_non_member, :watching_category_or_tag)
end
it "does not update existing unread notification" do
CategoryUser.set_notification_level_for_category(
user,
CategoryUser.notification_levels[:watching],
category.id,
)
topic = Fabricate(:topic, category: category)
post = Fabricate(:post, topic: topic)
PostAlerter.post_created(post)
notification = Notification.last
expect(notification.topic_id).to eq(topic.id)
expect(notification.post_number).to eq(1)
post = Fabricate(:post, topic: topic)
PostAlerter.post_created(post)
notification = Notification.last
expect(notification.topic_id).to eq(topic.id)
expect(notification.post_number).to eq(1)
notification_data = JSON.parse(notification.data)
expect(notification_data["display_username"]).to eq(I18n.t("embed.replies", count: 2))
end
it "sends a push notification when user has a push subscription" do
setup_push_notification_subscription_for(user: user)
topic = Fabricate(:topic, category: category)
post = Fabricate(:post, topic: topic)
level = CategoryUser.notification_levels[:watching]
CategoryUser.set_notification_level_for_category(user, level, category.id)
events = DiscourseEvent.track_events(:push_notification) { PostAlerter.post_created(post) }
expect(
events.detect do |e|
e[:params][0] == user &&
e[:params][1][:notification_type] == Notification.types[:watching_category_or_tag]
end,
).to be_present
end
end
end
context "with tags" do
context "with watching" do
it "triggers :before_create_notifications_for_users" do
tag = Fabricate(:tag)
topic = Fabricate(:topic, tags: [tag])
post = Fabricate(:post, topic: topic)
level = TagUser.notification_levels[:watching]
TagUser.change(user.id, tag.id, level)
events = DiscourseEvent.track_events { PostAlerter.post_created(post) }
expect(events).to include(
event_name: :before_create_notifications_for_users,
params: [[user], post],
)
end
it "does not update existing unread notification" do
tag = Fabricate(:tag)
TagUser.change(user.id, tag.id, TagUser.notification_levels[:watching])
topic = Fabricate(:topic, tags: [tag])
post = Fabricate(:post, topic: topic)
PostAlerter.post_created(post)
notification = Notification.last
expect(notification.topic_id).to eq(topic.id)
expect(notification.post_number).to eq(1)
post = Fabricate(:post, topic: topic)
PostAlerter.post_created(post)
notification = Notification.last
expect(notification.topic_id).to eq(topic.id)
expect(notification.post_number).to eq(1)
notification_data = JSON.parse(notification.data)
expect(notification_data["display_username"]).to eq(I18n.t("embed.replies", count: 2))
end
it "does not add notification if user does not belong to tag group with permissions" do
tag = Fabricate(:tag)
topic = Fabricate(:topic, tags: [tag])
post = Fabricate(:post, topic: topic)
tag_group = TagGroup.new(name: "Only visible to group", tag_names: [tag.name])
tag_group.permissions = [[group.id, TagGroupPermission.permission_types[:full]]]
tag_group.save!
TagUser.change(user.id, tag.id, TagUser.notification_levels[:watching])
expect { PostAlerter.post_created(post) }.not_to change { Notification.count }
end
it "adds notification if user belongs to tag group with permissions" do
tag = Fabricate(:tag)
topic = Fabricate(:topic, tags: [tag])
post = Fabricate(:post, topic: topic)
tag_group = Fabricate(:tag_group, tags: [tag])
Fabricate(:group_user, group: group, user: user)
Fabricate(:tag_group_permission, tag_group: tag_group, group: group)
TagUser.change(user.id, tag.id, TagUser.notification_levels[:watching])
expect { PostAlerter.post_created(post) }.to change { Notification.count }.by(1)
end
end
context "with category and tags" do
fab!(:muted_category) do
Fabricate(:category).tap do |category|
CategoryUser.set_notification_level_for_category(
user,
CategoryUser.notification_levels[:muted],
category.id,
)
end
end
fab!(:muted_tag) do
Fabricate(:tag).tap do |tag|
TagUser.create!(
user: user,
tag: tag,
notification_level: TagUser.notification_levels[:muted],
)
end
end
fab!(:watched_tag) do
Fabricate(:tag).tap do |tag|
TagUser.create!(
user: user,
tag: tag,
notification_level: TagUser.notification_levels[:watching],
)
end
end
fab!(:topic_with_muted_tag_and_watched_category) do
Fabricate(:topic, category: category, tags: [muted_tag])
end
fab!(:topic_with_muted_category_and_watched_tag) do
Fabricate(:topic, category: muted_category, tags: [watched_tag])
end
fab!(:directly_watched_topic) do
Fabricate(:topic, category: muted_category, tags: [muted_tag])
end
fab!(:topic_user) do
Fabricate(
:topic_user,
topic: directly_watched_topic,
user: user,
notification_level: TopicUser.notification_levels[:watching],
)
end
fab!(:topic_with_watched_category) { Fabricate(:topic, category: category) }
fab!(:post) { Fabricate(:post, topic: topic_with_muted_tag_and_watched_category) }
fab!(:post_2) { Fabricate(:post, topic: topic_with_muted_category_and_watched_tag) }
fab!(:post_3) { Fabricate(:post, topic: topic_with_watched_category) }
fab!(:post_4) { Fabricate(:post, topic: directly_watched_topic) }
before do
CategoryUser.set_notification_level_for_category(
user,
CategoryUser.notification_levels[:watching],
category.id,
)
end
it "adds notification when watched_precedence_over_muted setting is true" do
SiteSetting.watched_precedence_over_muted = true
expect {
PostAlerter.post_created(topic_with_muted_tag_and_watched_category.posts.first)
}.to change { Notification.count }.by(1)
expect {
PostAlerter.post_created(topic_with_muted_category_and_watched_tag.posts.first)
}.to change { Notification.count }.by(1)
expect { PostAlerter.post_created(directly_watched_topic.posts.first) }.to change {
Notification.count
}.by(1)
end
it "respects user option even if watched_precedence_over_muted site setting is true" do
SiteSetting.watched_precedence_over_muted = true
user.user_option.update!(watched_precedence_over_muted: false)
expect {
PostAlerter.post_created(topic_with_muted_tag_and_watched_category.posts.first)
}.not_to change { Notification.count }
expect {
PostAlerter.post_created(topic_with_muted_category_and_watched_tag.posts.first)
}.not_to change { Notification.count }
expect { PostAlerter.post_created(directly_watched_topic.posts.first) }.to change {
Notification.count
}.by(1)
end
it "does not add notification when watched_precedence_over_muted setting is false" do
SiteSetting.watched_precedence_over_muted = false
expect {
PostAlerter.post_created(topic_with_muted_tag_and_watched_category.posts.first)
}.not_to change { Notification.count }
expect {
PostAlerter.post_created(topic_with_muted_category_and_watched_tag.posts.first)
}.not_to change { Notification.count }
expect { PostAlerter.post_created(topic_with_watched_category.posts.first) }.to change {
Notification.count
}.by(1)
expect { PostAlerter.post_created(directly_watched_topic.posts.first) }.to change {
Notification.count
}.by(1)
end
it "respects user option even if watched_precedence_over_muted site setting is false" do
SiteSetting.watched_precedence_over_muted = false
user.user_option.update!(watched_precedence_over_muted: true)
expect {
PostAlerter.post_created(topic_with_muted_tag_and_watched_category.posts.first)
}.to change { Notification.count }.by(1)
expect {
PostAlerter.post_created(topic_with_muted_category_and_watched_tag.posts.first)
}.to change { Notification.count }.by(1)
expect { PostAlerter.post_created(directly_watched_topic.posts.first) }.to change {
Notification.count
}.by(1)
end
end
context "with on change" do
fab!(:user)
fab!(:other_tag) { Fabricate(:tag) }
fab!(:watched_tag) { Fabricate(:tag) }
before do
SiteSetting.tagging_enabled = true
Jobs.run_immediately!
TagUser.change(user.id, watched_tag.id, TagUser.notification_levels[:watching_first_post])
TopicUser.change(
Fabricate(:user).id,
post.topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
end
it "triggers a notification" do
expect(
user
.notifications
.where(notification_type: Notification.types[:watching_first_post])
.count,
).to eq(0)
expect {
PostRevisor.new(post).revise!(Fabricate(:user), tags: [other_tag.name, watched_tag.name])
}.to change { Notification.where(user_id: user.id).count }.by(1)
expect(
user
.notifications
.where(notification_type: Notification.types[:watching_first_post])
.count,
).to eq(1)
expect {
PostRevisor.new(post).revise!(Fabricate(:user), tags: [watched_tag.name, other_tag.name])
}.not_to change { Notification.count }
expect(
user
.notifications
.where(notification_type: Notification.types[:watching_first_post])
.count,
).to eq(1)
end
it "doesn't trigger a notification if topic is unlisted" do
post.topic.update_column(:visible, false)
expect(
user
.notifications
.where(notification_type: Notification.types[:watching_first_post])
.count,
).to eq(0)
PostRevisor.new(post).revise!(Fabricate(:user), tags: [other_tag.name, watched_tag.name])
expect(
user
.notifications
.where(notification_type: Notification.types[:watching_first_post])
.count,
).to eq(0)
end
end
context "with private message" do
fab!(:post) { Fabricate(:private_message_post) }
fab!(:other_tag) { Fabricate(:tag) }
fab!(:other_tag2) { Fabricate(:tag) }
fab!(:other_tag3) { Fabricate(:tag) }
fab!(:user)
fab!(:staged)
before do
SiteSetting.tagging_enabled = true
SiteSetting.pm_tags_allowed_for_groups = "1|2|3"
Jobs.run_immediately!
TopicUser.change(
user.id,
post.topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
TopicUser.change(
staged.id,
post.topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
TopicUser.change(
admin.id,
post.topic.id,
notification_level: TopicUser.notification_levels[:watching],
)
TagUser.change(staged.id, other_tag.id, TagUser.notification_levels[:watching])
TagUser.change(admin.id, other_tag3.id, TagUser.notification_levels[:watching])
post.topic.allowed_users << user
post.topic.allowed_users << staged
end
it "only notifies staff watching added tag" do
expect(PostRevisor.new(post).revise!(Fabricate(:admin), tags: [other_tag.name])).to be true
expect(Notification.where(user_id: staged.id).count).to eq(0)
expect(PostRevisor.new(post).revise!(Fabricate(:admin), tags: [other_tag2.name])).to be true
expect(Notification.where(user_id: admin.id).count).to eq(0)
expect(PostRevisor.new(post).revise!(Fabricate(:admin), tags: [other_tag3.name])).to be true
expect(Notification.where(user_id: admin.id).count).to eq(1)
end
end
context "with tag groups" do
fab!(:tag)
fab!(:user)
fab!(:topic) { Fabricate(:topic, tags: [tag]) }
fab!(:post) { Fabricate(:post, topic: topic) }
shared_examples "tag user with notification level" do |notification_level, notification_type|
it "notifies a user who is watching a tag that does not belong to a tag group" do
TagUser.change(user.id, tag.id, TagUser.notification_levels[notification_level])
PostAlerter.post_created(post)
expect(
user
.notifications
.where(notification_type: Notification.types[notification_type])
.count,
).to eq(1)
end
it "does not notify a user watching a tag with tag group permissions that he does not belong to" do
tag_group = Fabricate(:tag_group, tags: [tag], permissions: { group.name => 1 })
TagUser.change(user.id, tag.id, TagUser.notification_levels[notification_level])
PostAlerter.post_created(post)
expect(
user
.notifications
.where(notification_type: Notification.types[notification_type])
.count,
).to eq(0)
end
it "notifies a user watching a tag with tag group permissions that he belongs to" do
Fabricate(:group_user, group: group, user: user)
TagUser.change(user.id, tag.id, TagUser.notification_levels[notification_level])
PostAlerter.post_created(post)
expect(
user
.notifications
.where(notification_type: Notification.types[notification_type])
.count,
).to eq(1)
end
it "notifies a staff watching a tag with tag group permissions that he does not belong to" do
tag_group = Fabricate(:tag_group, tags: [tag])
Fabricate(:tag_group_permission, tag_group: tag_group, group: group)
staff_group = Group.find(Group::AUTO_GROUPS[:staff])
Fabricate(:group_user, group: staff_group, user: user)
TagUser.change(user.id, tag.id, TagUser.notification_levels[notification_level])
PostAlerter.post_created(post)
expect(
user
.notifications
.where(notification_type: Notification.types[notification_type])
.count,
).to eq(1)
end
end
context "with :watching notification level" do
include_examples "tag user with notification level", :watching, :watching_category_or_tag
end
context "with :watching_first_post notification level" do
include_examples "tag user with notification level",
:watching_first_post,
:watching_first_post
end
end
end
describe "#extract_linked_users" do
fab!(:post) { Fabricate(:post, topic: topic) }
fab!(:post2) { Fabricate(:post) }
describe "when linked post has been deleted" do
let(:topic_link) do
TopicLink.create!(
url: "/t/#{topic.id}",
topic_id: topic.id,
link_topic_id: post2.topic.id,
link_post_id: nil,
post_id: post.id,
user: user,
domain: "test.com",
)
end
it "should use the first post of the topic" do
topic_link
expect(PostAlerter.new.extract_linked_users(post.reload)).to eq([post2.user])
end
end
end
describe "#notify_post_users" do
fab!(:post) { Fabricate(:post, topic: topic) }
fab!(:last_editor) { Fabricate(:user) }
fab!(:tag)
fab!(:category)
it "creates single edit notification when post is modified" do
TopicUser.create!(
user_id: user.id,
topic_id: topic.id,
notification_level: TopicUser.notification_levels[:watching],
last_read_post_number: post.post_number,
)
PostRevisor.new(post).revise!(last_editor, tags: [tag.name])
PostAlerter.new.notify_post_users(post, [])
expect(Notification.count).to eq(1)
expect(Notification.last.notification_type).to eq(Notification.types[:edited])
expect(JSON.parse(Notification.last.data)["display_username"]).to eq(last_editor.username)
PostAlerter.new.notify_post_users(post, [])
expect(Notification.count).to eq(1)
end
it "creates posted notification when Sidekiq is slow" do
CategoryUser.set_notification_level_for_category(
user,
CategoryUser.notification_levels[:watching],
category.id,
)
post =
PostCreator.create!(
Fabricate(:user),
title: "one of my first topics",
raw: "one of my first posts",
category: category.id,
)
TopicUser.change(user, post.topic_id, last_read_post_number: post.post_number)
# Manually run job after the user read the topic to simulate a slow
# Sidekiq.
job_args = Jobs::PostAlert.jobs[0]["args"][0]
expect { Jobs::PostAlert.new.execute(job_args.with_indifferent_access) }.to change {
Notification.count
}.by(1)
expect(Notification.last.notification_type).to eq(Notification.types[:posted])
end
end
context "with SMTP (group_smtp_email)" do
before do
SiteSetting.enable_smtp = true
SiteSetting.email_in = true
Jobs.run_immediately!
end
fab!(:group) do
Fabricate(
:group,
smtp_server: "smtp.gmail.com",
smtp_port: 587,
smtp_ssl: true,
imap_server: "imap.gmail.com",
imap_port: 993,
imap_ssl: true,
email_username: "[email protected]",
email_password: "password",
smtp_enabled: true,
imap_enabled: true,
)
end
# Builds a topic through the email pipeline: an inbound message addressed
# to the group inbox with two extra CC recipients, processed by
# Email::Receiver. Returns the post created from the email.
def create_post_with_incoming
  Email::Receiver.new(<<~EMAIL, {}).process!
    From: Foo <[email protected]>
    To: [email protected]
    Cc: [email protected], [email protected]
    Subject: Full email group username flow
    Date: Fri, 15 Jan 2021 00:12:43 +0100
    Message-ID: <[email protected]>
    Mime-Version: 1.0
    Content-Type: text/plain
    Content-Transfer-Encoding: 7bit
    This is the first email.
  EMAIL
end
# Guard cases: after_save_post must tolerate topics with no incoming
# email and any number of allowed groups without raising.
it "does not error if SMTP is enabled and the topic has no incoming email or allowed groups" do
  expect { PostAlerter.new.after_save_post(post, true) }.not_to raise_error
end

it "does not error if SMTP is enabled and the topic has no incoming email but does have an allowed group" do
  TopicAllowedGroup.create(topic: private_message_topic, group: group)
  expect { PostAlerter.new.after_save_post(post, true) }.not_to raise_error
end

it "does not error if SMTP is enabled and the topic has no incoming email but has multiple allowed groups" do
  TopicAllowedGroup.create(topic: private_message_topic, group: group)
  TopicAllowedGroup.create(topic: private_message_topic, group: Fabricate(:group))
  expect { PostAlerter.new.after_save_post(post, true) }.not_to raise_error
end

it "sends a group smtp email because SMTP is enabled for the site and the group" do
  incoming_email_post = create_post_with_incoming
  topic = incoming_email_post.topic
  post = Fabricate(:post, topic: topic)
  expect { PostAlerter.new.after_save_post(post, true) }.to change {
    ActionMailer::Base.deliveries.size
  }.by(1)
  email = ActionMailer::Base.deliveries.last
  # The reply goes out from the group's own SMTP identity, to the original
  # sender, keeping the original CC addresses intact.
  expect(email.from).to include(group.email_username)
  expect(email.to).to contain_exactly(
    topic.reload.topic_allowed_users.order(:created_at).first.user.email,
  )
  expect(email.cc).to match_array(%w[[email protected] [email protected]])
  expect(email.subject).to eq("Re: #{topic.title}")
end

it "sends a group smtp email when the original group has had SMTP disabled and there is an additional topic allowed group" do
  incoming_email_post = create_post_with_incoming
  topic = incoming_email_post.topic
  other_allowed_group = Fabricate(:smtp_group)
  TopicAllowedGroup.create(group: other_allowed_group, topic: topic)
  post = Fabricate(:post, topic: topic)
  group.update!(smtp_enabled: false)
  expect { PostAlerter.new.after_save_post(post, true) }.to change {
    ActionMailer::Base.deliveries.size
  }.by(1)
  email = ActionMailer::Base.deliveries.last
  # Falls back to the other allowed group's SMTP identity.
  expect(email.from).to include(other_allowed_group.email_username)
  expect(email.to).to contain_exactly(
    topic.reload.topic_allowed_users.order(:created_at).first.user.email,
  )
  expect(email.cc).to match_array(%w[[email protected] [email protected]])
  expect(email.subject).to eq("Re: #{topic.title}")
end

it "does not send a group smtp email if smtp is not enabled for the group" do
  group.update!(smtp_enabled: false)
  incoming_email_post = create_post_with_incoming
  topic = incoming_email_post.topic
  post = Fabricate(:post, topic: topic)
  expect { PostAlerter.new.after_save_post(post, true) }.not_to change {
    ActionMailer::Base.deliveries.size
  }
end

it "does not send a group smtp email if SiteSetting.enable_smtp is false" do
  SiteSetting.enable_smtp = false
  incoming_email_post = create_post_with_incoming
  topic = incoming_email_post.topic
  post = Fabricate(:post, topic: topic)
  expect { PostAlerter.new.after_save_post(post, true) }.not_to change {
    ActionMailer::Base.deliveries.size
  }
end

# Whispers are staff-internal and must never leak out via group SMTP.
it "does not send group smtp emails for a whisper" do
  incoming_email_post = create_post_with_incoming
  topic = incoming_email_post.topic
  post = Fabricate(:post, topic: topic, post_type: Post.types[:whisper])
  expect { PostAlerter.new.after_save_post(post, true) }.not_to change {
    ActionMailer::Base.deliveries.size
  }
end
it "sends the group smtp email job with a delay of personal_email_time_window_seconds" do
  # freeze_time pins Time.zone.now so the expected enqueue time is exact.
  freeze_time
  incoming_email_post = create_post_with_incoming
  topic = incoming_email_post.topic
  post = Fabricate(:post, topic: topic)
  PostAlerter.new.after_save_post(post, true)
  job_enqueued?(
    job: :group_smtp_email,
    args: {
      group_id: group.id,
      post_id: post.id,
      email: topic.reload.topic_allowed_users.order(:created_at).first.user.email,
      cc_emails: %w[[email protected] [email protected]],
    },
    at: Time.zone.now + SiteSetting.personal_email_time_window_seconds.seconds,
  )
end

it "does not send a group smtp email for anyone if the reply post originates from an incoming email that is auto generated" do
  incoming_email_post = create_post_with_incoming
  topic = incoming_email_post.topic
  post = Fabricate(:post, topic: topic)
  # Auto-generated inbound mail (e.g. out-of-office replies) must not
  # trigger outbound group SMTP mail.
  Fabricate(:incoming_email, post: post, topic: topic, is_auto_generated: true)
  expect_not_enqueued_with(job: :group_smtp_email) do
    expect { PostAlerter.new.after_save_post(post, true) }.not_to change {
      ActionMailer::Base.deliveries.size
    }
  end
end
it "skips sending a notification email to the group and all other email addresses that are _not_ members of the group,
  sends a group_smtp_email instead" do
  NotificationEmailer.enable
  incoming_email_post = create_post_with_incoming
  topic = incoming_email_post.topic
  group_user1 = Fabricate(:group_user, group: group)
  group_user2 = Fabricate(:group_user, group: group)
  TopicUser.create(
    user: group_user1.user,
    notification_level: TopicUser.notification_levels[:watching],
    topic: topic,
  )
  post = Fabricate(:post, topic: topic.reload)
  # Sends an email for:
  #
  # 1. the group user that is watching the post (but does not send this email with group SMTP)
  # 2. the group smtp email to notify all topic_users not in the group
  expect { PostAlerter.new.after_save_post(post, true) }.to change {
    ActionMailer::Base.deliveries.size
  }.by(2).and change { Notification.count }.by(2)
  # The group smtp email
  email = ActionMailer::Base.deliveries.first
  expect(email.from).to eq([group.email_username])
  expect(email.to).to contain_exactly("[email protected]")
  expect(email.cc).to match_array(%w[[email protected] [email protected]])
  expect(email.subject).to eq("Re: #{topic.title}")
  # The watching group user notification email
  email = ActionMailer::Base.deliveries.last
  expect(email.from).to eq([SiteSetting.notification_email])
  expect(email.to).to contain_exactly(group_user1.user.email)
  expect(email.cc).to eq(nil)
  expect(email.subject).to eq("[Discourse] [PM] #{topic.title}")
end

it "skips sending a notification email to the cc address that was added on the same post with an incoming email" do
  NotificationEmailer.enable
  incoming_email_post = create_post_with_incoming
  topic = incoming_email_post.topic
  post = Fabricate(:post, topic: topic.reload)
  expect { PostAlerter.new.after_save_post(post, true) }.to change {
    ActionMailer::Base.deliveries.size
  }.by(1).and change { Notification.count }.by(1)
  email = ActionMailer::Base.deliveries.last
  # the reply post from someone who was emailed
  reply_raw_mail = <<~EMAIL
    From: Bar <[email protected]>
    To: [email protected]
    Cc: [email protected], [email protected]
    Subject: #{email.subject}
    Date: Fri, 16 Jan 2021 00:12:43 +0100
    Message-ID: <[email protected]>
    In-Reply-To: #{email.message_id}
    Mime-Version: 1.0
    Content-Type: text/plain
    Content-Transfer-Encoding: 7bit
    Hey here is my reply!
  EMAIL
  reply_post_from_email = nil
  expect {
    reply_post_from_email = Email::Receiver.new(reply_raw_mail, {}).process!
  }.to change {
    User.count # the two new cc addresses have users created
  }.by(2).and change {
    TopicAllowedUser.where(topic: topic).count # and they are added as topic allowed users
  }.by(2).and change {
    # but they are not sent emails because they were cc'd on an email, only [email protected]
    # is emailed because he is a topic allowed user cc'd on the _original_ email and he is not
    # the one creating the post, and [email protected], who is the OP of the topic
    ActionMailer::Base.deliveries.size
  }.by(1).and change {
    Notification.count # and they are still sent their normal discourse notification
  }.by(2)
  email = ActionMailer::Base.deliveries.last
  expect(email.to).to eq(["[email protected]"])
  expect(email.cc).to eq(["[email protected]"])
  expect(email.from).to eq([group.email_username])
  expect(email.subject).to eq("Re: #{topic.title}")
end
it "handles the OP of the topic replying by email and sends a group email to the other topic allowed users successfully" do
  NotificationEmailer.enable
  incoming_email_post = create_post_with_incoming
  topic = incoming_email_post.topic
  post = Fabricate(:post, topic: topic.reload)
  expect { PostAlerter.new.after_save_post(post, true) }.to change {
    ActionMailer::Base.deliveries.size
  }.by(1).and change { Notification.count }.by(1)
  email = ActionMailer::Base.deliveries.last
  # the reply post from someone who was emailed
  reply_raw_mail = <<~EMAIL
    From: Foo <[email protected]>
    To: [email protected]
    Cc: [email protected], [email protected]
    Subject: #{email.subject}
    Date: Fri, 16 Jan 2021 00:12:43 +0100
    Message-ID: <[email protected]>
    In-Reply-To: #{email.message_id}
    Mime-Version: 1.0
    Content-Type: text/plain
    Content-Transfer-Encoding: 7bit
    I am ~~Commander Shepherd~~ the OP and I approve of this message.
  EMAIL
  reply_post_from_email = nil
  expect {
    reply_post_from_email = Email::Receiver.new(reply_raw_mail, {}).process!
  }.to change {
    User.count # the two new cc addresses have users created
  }.by(2).and change {
    TopicAllowedUser.where(topic: topic).count # and they are added as topic allowed users
  }.by(2).and change {
    # but they are not sent emails because they were cc'd on an email, only [email protected]
    # is emailed because he is a topic allowed user cc'd on the _original_ email and he is not
    # the one creating the post
    ActionMailer::Base.deliveries.size
  }.by(1).and change {
    Notification.count # and they are still sent their normal discourse notification
  }.by(2)
  email = ActionMailer::Base.deliveries.last
  expect(email.to).to eq(["[email protected]"])
  expect(email.cc).to eq(["[email protected]"])
  expect(email.from).to eq([group.email_username])
  expect(email.subject).to eq("Re: #{topic.title}")
end

it "handles the OP of the topic replying by email and cc'ing new people, and does not send a group SMTP email to those newly cc'd users" do
  NotificationEmailer.enable
  # this is a special case where we are not CC'ing on the original email,
  # only on the follow up email
  raw_mail = <<~EMAIL
    From: Foo <[email protected]>
    To: [email protected]
    Subject: Full email group username flow
    Date: Fri, 14 Jan 2021 00:12:43 +0100
    Message-ID: <[email protected]>
    Mime-Version: 1.0
    Content-Type: text/plain
    Content-Transfer-Encoding: 7bit
    This is the first email.
  EMAIL
  incoming_email_post = Email::Receiver.new(raw_mail, {}).process!
  topic = incoming_email_post.topic
  post = Fabricate(:post, topic: topic.reload)
  expect { PostAlerter.new.after_save_post(post, true) }.to change {
    ActionMailer::Base.deliveries.size
  }.by(1).and change { Notification.count }.by(1)
  email = ActionMailer::Base.deliveries.last
  # the reply post from the OP, cc'ing new people in
  reply_raw_mail = <<~EMAIL
    From: Foo <[email protected]>
    To: [email protected]
    Cc: [email protected], [email protected]
    Subject: #{email.subject}
    Date: Fri, 16 Jan 2021 00:12:43 +0100
    Message-ID: <[email protected]>
    In-Reply-To: #{email.message_id}
    Mime-Version: 1.0
    Content-Type: text/plain
    Content-Transfer-Encoding: 7bit
    I am inviting my mates to this email party.
  EMAIL
  reply_post_from_email = nil
  expect {
    reply_post_from_email = Email::Receiver.new(reply_raw_mail, {}).process!
  }.to change {
    User.count # the two new cc addresses have users created
  }.by(2).and change {
    TopicAllowedUser.where(topic: topic).count # and they are added as topic allowed users
  }.by(2).and not_change {
    # but they are not sent emails because they were cc'd on an email.
    # no group smtp message is sent because the OP is not sent an email,
    # they made this post.
    ActionMailer::Base.deliveries.size
  }.and change {
    Notification.count # and they are still sent their normal discourse notification
  }.by(2)
  last_email = ActionMailer::Base.deliveries.last
  # No new outbound mail: the most recent delivery is still the one from
  # before the OP's reply was processed.
  expect(email).to eq(last_email)
end
end
describe "storing custom data" do
  let(:custom_data) { "custom_string" }

  # Arbitrary keys passed via custom_data: must round-trip through the
  # serialized notification payload.
  it "stores custom data inside a notification" do
    liked_type = Notification.types[:liked]
    PostAlerter.new.create_notification(
      admin,
      liked_type,
      post,
      custom_data: { custom_key: custom_data },
    )
    stored = Notification.where(notification_type: liked_type).last
    expect(stored.data_hash[:custom_key]).to eq(custom_data)
  end
end
# Tag-watching must not leak notifications into private messages the
# watcher was not invited to; only the PM recipient gets notified.
it "does not create notifications for PMs if not invited" do
  SiteSetting.pm_tags_allowed_for_groups = "#{Group::AUTO_GROUPS[:everyone]}"

  first_post_tag = Fabricate(:tag)
  TagUser.change(admin.id, first_post_tag.id, TagUser.notification_levels[:watching_first_post])
  watched_tag = Fabricate(:tag)
  TagUser.change(admin.id, watched_tag.id, TagUser.notification_levels[:watching])

  pm_post =
    create_post(
      tags: [first_post_tag.name, watched_tag.name],
      archetype: Archetype.private_message,
      target_usernames: evil_trout.username,
    )

  expect { PostAlerter.new.after_save_post(pm_post, true) }.to change { Notification.count }.by(1)

  notification = Notification.last
  expect(notification.user).to eq(evil_trout)
  expect(notification.notification_type).to eq(Notification.types[:private_message])
  expect(notification.topic).to eq(pm_post.topic)
  expect(notification.post_number).to eq(1)
end
# A post matching several watched triggers at once (tracked category,
# watching-first-post tag, watched tag) must yield exactly one notification.
it "does not create multiple notifications for same post" do
  tracked_category = Fabricate(:category)
  CategoryUser.set_notification_level_for_category(
    user,
    NotificationLevels.all[:tracking],
    tracked_category.id,
  )
  first_post_tag = Fabricate(:tag)
  TagUser.change(user.id, first_post_tag.id, TagUser.notification_levels[:watching_first_post])
  watched_tag = Fabricate(:tag)
  TagUser.change(user.id, watched_tag.id, TagUser.notification_levels[:watching])

  post = create_post(category: tracked_category, tags: [first_post_tag.name, watched_tag.name])

  expect { PostAlerter.new.after_save_post(post, true) }.to change { Notification.count }.by(1)

  notification = Notification.last
  expect(notification.user).to eq(user)
  expect(notification.notification_type).to eq(Notification.types[:watching_category_or_tag])
  expect(notification.topic).to eq(post.topic)
  expect(notification.post_number).to eq(1)
end
# Every post_alerter_* DiscourseEvent hook must fire from after_save_post
# with the (post, new_record, users) parameter tuple.
it "triggers all discourse events" do
  expected_events = %i[
    post_alerter_before_mentions
    post_alerter_before_replies
    post_alerter_before_quotes
    post_alerter_before_linked
    post_alerter_before_post
    post_alerter_before_first_post
    post_alerter_after_save_post
  ]

  events = DiscourseEvent.track_events { PostAlerter.new.after_save_post(post, true) }

  # Unrelated events can fire from outside after_save_post, so check
  # inclusion rather than strict equality.
  expect(events.map { |event| event[:event_name] }).to include(*expected_events)

  # Each of our hooks receives the same parameters.
  events
    .select { |event| expected_events.include?(event[:event_name]) }
    .each { |event| expect(event[:params]).to eq([post, true, [post.user]]) }
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Coordinates direct-to-S3 ("external") uploads: creates presigned upload
# stubs for clients, then promotes a completed stub to a real Upload (or,
# for backups, moves it into place) while enforcing size/checksum checks.
class ExternalUploadManager
  # Files larger than this are never downloaded server-side for checksum
  # verification; a fake sha1 is generated instead (see promote_to_upload).
  DOWNLOAD_LIMIT = 100.megabytes
  SIZE_MISMATCH_BAN_MINUTES = 5
  BAN_USER_REDIS_PREFIX = "ban_user_from_external_uploads_"
  UPLOAD_TYPES_EXCLUDED_FROM_UPLOAD_PROMOTION = ["backup"].freeze
  class ChecksumMismatchError < StandardError
  end
  class DownloadFailedError < StandardError
  end
  class CannotPromoteError < StandardError
  end
  class SizeMismatchError < StandardError
  end
  attr_reader :external_upload_stub

  # Temporarily bans a user from external uploads via a redis key with a
  # TTL of ban_minutes.
  def self.ban_user_from_external_uploads!(user:, ban_minutes: 5)
    Discourse.redis.setex("#{BAN_USER_REDIS_PREFIX}#{user.id}", ban_minutes.minutes.to_i, "1")
  end

  def self.user_banned?(user)
    Discourse.redis.get("#{BAN_USER_REDIS_PREFIX}#{user.id}") == "1"
  end

  # Records an ExternalUploadStub and returns the presigned URL (plus any
  # signed headers) the client PUTs the file to directly.
  def self.create_direct_upload(current_user:, file_name:, file_size:, upload_type:, metadata: {})
    store = store_for_upload_type(upload_type)
    url, signed_headers = store.signed_request_for_temporary_upload(file_name, metadata: metadata)
    key = store.s3_helper.path_from_url(url)
    upload_stub =
      ExternalUploadStub.create!(
        key: key,
        created_by: current_user,
        original_filename: file_name,
        upload_type: upload_type,
        filesize: file_size,
      )
    {
      url: url,
      key: key,
      unique_identifier: upload_stub.unique_identifier,
      signed_headers: signed_headers,
    }
  end

  # Multipart variant of create_direct_upload; also records the provider's
  # upload_id so the multipart upload can be completed or aborted later.
  def self.create_direct_multipart_upload(
    current_user:,
    file_name:,
    file_size:,
    upload_type:,
    metadata: {}
  )
    content_type = MiniMime.lookup_by_filename(file_name)&.content_type
    store = store_for_upload_type(upload_type)
    multipart_upload = store.create_multipart(file_name, content_type, metadata: metadata)
    upload_stub =
      ExternalUploadStub.create!(
        key: multipart_upload[:key],
        created_by: current_user,
        original_filename: file_name,
        upload_type: upload_type,
        external_upload_identifier: multipart_upload[:upload_id],
        multipart: true,
        filesize: file_size,
      )
    {
      external_upload_identifier: upload_stub.external_upload_identifier,
      key: upload_stub.key,
      unique_identifier: upload_stub.unique_identifier,
    }
  end

  # Backups use the dedicated S3 backup store (and require S3 backups to be
  # enabled); every other upload type uses the default store.
  def self.store_for_upload_type(upload_type)
    if upload_type == "backup"
      if !SiteSetting.enable_backups? ||
           SiteSetting.backup_location != BackupLocationSiteSetting::S3
        raise Discourse::InvalidAccess.new
      end
      BackupRestore::BackupStore.create
    else
      Discourse.store
    end
  end

  def initialize(external_upload_stub, upload_create_opts = {})
    @external_upload_stub = external_upload_stub
    @upload_create_opts = upload_create_opts
    @store = ExternalUploadManager.store_for_upload_type(external_upload_stub.upload_type)
  end

  # Only stubs still in the :created state may be promoted.
  def can_promote?
    external_upload_stub.status == ExternalUploadStub.statuses[:created]
  end

  # Verifies the uploaded object and turns it into its final form: a real
  # Upload record, or (for backups) a moved S3 object. On failure the stub
  # and the stored file are cleaned up unless upload-debug mode is on.
  def transform!
    raise CannotPromoteError if !can_promote?
    external_upload_stub.update!(status: ExternalUploadStub.statuses[:uploaded])
    # We require that the file size is specified ahead of time, and compare
    # it here to make sure that people are not uploading excessively large
    # files to the external provider. If this happens, the user will be banned
    # from uploading to the external provider for N minutes.
    if external_size != external_upload_stub.filesize
      ExternalUploadManager.ban_user_from_external_uploads!(
        user: external_upload_stub.created_by,
        ban_minutes: SIZE_MISMATCH_BAN_MINUTES,
      )
      raise SizeMismatchError.new(
        "expected: #{external_upload_stub.filesize}, actual: #{external_size}",
      )
    end
    if UPLOAD_TYPES_EXCLUDED_FROM_UPLOAD_PROMOTION.include?(external_upload_stub.upload_type)
      move_to_final_destination
    else
      promote_to_upload
    end
  rescue StandardError
    if !SiteSetting.enable_upload_debug_mode
      # We don't need to do anything special to abort multipart uploads here,
      # because at this point (calling promote_to_upload!), the multipart
      # upload would already be complete.
      @store.delete_file(external_upload_stub.key)
      external_upload_stub.destroy!
    else
      external_upload_stub.update(status: ExternalUploadStub.statuses[:failed])
    end
    raise
  end

  private

  def promote_to_upload
    # This could be legitimately nil, if it's too big to download on the
    # server, or it could have failed. To this end we set a should_download
    # variable as well to check.
    tempfile = nil
    should_download = external_size < DOWNLOAD_LIMIT
    if should_download
      tempfile = download(external_upload_stub.key, external_upload_stub.upload_type)
      raise DownloadFailedError if tempfile.blank?
      actual_sha1 = Upload.generate_digest(tempfile)
      raise ChecksumMismatchError if external_sha1 && external_sha1 != actual_sha1
    end
    opts = {
      type: external_upload_stub.upload_type,
      existing_external_upload_key: external_upload_stub.key,
      external_upload_too_big: external_size > DOWNLOAD_LIMIT,
      filesize: external_size,
    }.merge(@upload_create_opts)
    UploadCreator.new(tempfile, external_upload_stub.original_filename, opts).create_for(
      external_upload_stub.created_by_id,
    )
  ensure
    tempfile&.close!
  end

  # Backups: move the object within S3 instead of creating an Upload.
  # Returns a minimal upload-like struct whose #errors is empty.
  def move_to_final_destination
    content_type = MiniMime.lookup_by_filename(external_upload_stub.original_filename).content_type
    @store.move_existing_stored_upload(
      existing_external_upload_key: external_upload_stub.key,
      original_filename: external_upload_stub.original_filename,
      content_type: content_type,
    )
    Struct.new(:errors).new([])
  end

  def external_stub_object
    @external_stub_object ||= @store.object_from_path(external_upload_stub.key)
  end

  def external_etag
    @external_etag ||= external_stub_object.etag
  end

  def external_size
    @external_size ||= external_stub_object.size
  end

  # sha1 checksum the client may have set as S3 object metadata.
  def external_sha1
    @external_sha1 ||= external_stub_object.metadata["sha1-checksum"]
  end

  def download(key, type)
    url = @store.signed_url_for_path(external_upload_stub.key)
    uri = URI(url)
    FileHelper.download(
      url,
      max_file_size: DOWNLOAD_LIMIT,
      tmp_file_name: "discourse-upload-#{type}",
      follow_redirect: true,
      # Local S3 servers (like minio) do not use port 80, and the Aws::Sigv4::Signer
      # includes the port number in the Host header when presigning URLs if the
      # port is not 80, so we have to make sure the Host header sent by
      # FinalDestination includes the port, otherwise we will get a
      # `SignatureDoesNotMatch` error.
      include_port_in_host_header: uri.scheme == "http" && uri.port != 80,
    )
  end
end
```
|
# frozen_string_literal: true
RSpec.describe ExternalUploadManager do
subject(:manager) { ExternalUploadManager.new(external_upload_stub) }

fab!(:user)

# Fixture files plus the size/metadata the fake S3 object will report.
let!(:logo_file) { file_from_fixtures("logo.png") }
let!(:pdf_file) { file_from_fixtures("large.pdf", "pdf") }
let(:object_size) { 1.megabyte }
let(:etag) { "e696d20564859cbdf77b0f51cbae999a" }
let(:client_sha1) { Upload.generate_digest(object_file) }
let(:sha1) { Upload.generate_digest(object_file) }
let(:object_file) { logo_file }
let(:external_upload_stub_metadata) { {} }
let!(:external_upload_stub) { Fabricate(:image_external_upload_stub, created_by: user) }
let(:s3_bucket_name) { SiteSetting.s3_upload_bucket }

before do
  SiteSetting.authorized_extensions += "|pdf"
  SiteSetting.max_attachment_size_kb = 210.megabytes / 1000
  # Point both the upload store and the backup store at a fake S3, and stub
  # the HTTP download of the presigned URL.
  setup_s3
  SiteSetting.s3_backup_bucket = "s3-backup-bucket"
  SiteSetting.backup_location = BackupLocationSiteSetting::S3
  prepare_fake_s3
  stub_download_object_filehelper
end
describe "#ban_user_from_external_uploads!" do
  # The ban lives in redis; flush so it cannot leak into other examples.
  after { Discourse.redis.flushdb }

  it "bans the user from external uploads using a redis key" do
    ExternalUploadManager.ban_user_from_external_uploads!(user: user)
    expect(ExternalUploadManager.user_banned?(user)).to eq(true)
  end
end

describe "#can_promote?" do
  it "returns false if the external stub status is not created" do
    external_upload_stub.update!(status: ExternalUploadStub.statuses[:uploaded])
    expect(manager.can_promote?).to eq(false)
  end
end
describe "#transform!" do
# Small files are downloaded server-side so the real sha1 can be verified
# against the client-supplied checksum and stub filesize.
context "when stubbed upload is < DOWNLOAD_LIMIT (small enough to download + generate sha)" do
  let!(:external_upload_stub) do
    Fabricate(:image_external_upload_stub, created_by: user, filesize: object_size)
  end
  let(:object_size) { 1.megabyte }
  let(:object_file) { logo_file }

  context "when the download of the s3 file fails" do
    before { FileHelper.stubs(:download).returns(nil) }

    it "raises an error" do
      expect { manager.transform! }.to raise_error(ExternalUploadManager::DownloadFailedError)
    end
  end

  context "when the upload is not in the created status" do
    before { external_upload_stub.update!(status: ExternalUploadStub.statuses[:uploaded]) }

    it "raises an error" do
      expect { manager.transform! }.to raise_error(ExternalUploadManager::CannotPromoteError)
    end
  end

  context "when the upload does not get changed in UploadCreator (resized etc.)" do
    it "copies the stubbed upload on S3 to its new destination and deletes it" do
      upload = manager.transform!
      bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
      # Unchanged files are moved with a server-side S3 copy, not re-uploaded.
      expect(@fake_s3.operation_called?(:copy_object)).to eq(true)
      expect(bucket.find_object(Discourse.store.get_path_for_upload(upload))).to be_present
      expect(bucket.find_object(external_upload_stub.key)).to be_nil
    end

    it "errors if the image upload is too big" do
      SiteSetting.max_image_size_kb = 1
      upload = manager.transform!
      expect(upload.errors.full_messages).to include(
        "Filesize " +
          I18n.t(
            "upload.images.too_large_humanized",
            max_size:
              ActiveSupport::NumberHelper.number_to_human_size(
                SiteSetting.max_image_size_kb.kilobytes,
              ),
          ),
      )
    end

    it "errors if the extension is not supported" do
      SiteSetting.authorized_extensions = ""
      upload = manager.transform!
      expect(upload.errors.full_messages).to include(
        "Original filename " + I18n.t("upload.unauthorized", authorized_extensions: ""),
      )
    end
  end

  context "when the upload does get changed by the UploadCreator" do
    # heic gets converted to jpeg, so the promoted file differs from the
    # one originally stored at the stub key.
    let(:object_file) { file_from_fixtures("should_be_jpeg.heic", "images") }
    let(:object_size) { 1.megabyte }
    let(:external_upload_stub) do
      Fabricate(
        :image_external_upload_stub,
        original_filename: "should_be_jpeg.heic",
        filesize: object_size,
      )
    end

    it "creates a new upload in s3 (not copy) and deletes the original stubbed upload" do
      upload = manager.transform!
      bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
      expect(@fake_s3.operation_called?(:copy_object)).to eq(false)
      expect(bucket.find_object(Discourse.store.get_path_for_upload(upload))).to be_present
      expect(bucket.find_object(external_upload_stub.key)).to be_nil
    end
  end

  context "when the sha has been set on the s3 object metadata by the clientside JS" do
    let(:external_upload_stub_metadata) { { "sha1-checksum" => client_sha1 } }

    context "when the downloaded file sha1 does not match the client sha1" do
      let(:client_sha1) { "blahblah" }

      it "raises an error, deletes the stub" do
        expect { manager.transform! }.to raise_error(
          ExternalUploadManager::ChecksumMismatchError,
        )
        expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false)
        bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
        expect(bucket.find_object(external_upload_stub.key)).to be_nil
      end

      it "does not delete the stub if enable_upload_debug_mode" do
        SiteSetting.enable_upload_debug_mode = true
        expect { manager.transform! }.to raise_error(
          ExternalUploadManager::ChecksumMismatchError,
        )
        external_stub = ExternalUploadStub.find(external_upload_stub.id)
        expect(external_stub.status).to eq(ExternalUploadStub.statuses[:failed])
        bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
        expect(bucket.find_object(external_upload_stub.key)).to be_present
      end
    end
  end

  context "when the downloaded file size does not match the expected file size for the upload stub" do
    before { external_upload_stub.update!(filesize: 10) }

    # The size-mismatch ban is stored in redis; clean it up.
    after { Discourse.redis.flushdb }

    it "raises an error, deletes the file immediately, and prevents the user from uploading external files for a few minutes" do
      expect { manager.transform! }.to raise_error(ExternalUploadManager::SizeMismatchError)
      expect(ExternalUploadStub.exists?(id: external_upload_stub.id)).to eq(false)
      expect(
        Discourse.redis.get(
          "#{ExternalUploadManager::BAN_USER_REDIS_PREFIX}#{external_upload_stub.created_by_id}",
        ),
      ).to eq("1")
      bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
      expect(bucket.find_object(external_upload_stub.key)).to be_nil
    end

    it "does not delete the stub if enable_upload_debug_mode" do
      SiteSetting.enable_upload_debug_mode = true
      expect { manager.transform! }.to raise_error(ExternalUploadManager::SizeMismatchError)
      external_stub = ExternalUploadStub.find(external_upload_stub.id)
      expect(external_stub.status).to eq(ExternalUploadStub.statuses[:failed])
      bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
      expect(bucket.find_object(external_upload_stub.key)).to be_present
    end
  end
end
# Files over DOWNLOAD_LIMIT are never downloaded for verification; a fake
# sha1 is generated so the Upload record can still be created.
context "when stubbed upload is > DOWNLOAD_LIMIT (too big to download, generate a fake sha)" do
  let(:object_size) { 200.megabytes }
  let(:object_file) { pdf_file }
  let!(:external_upload_stub) do
    Fabricate(:attachment_external_upload_stub, created_by: user, filesize: object_size)
  end

  before do
    UploadCreator
      .any_instance
      .stubs(:generate_fake_sha1_hash)
      .returns("testbc60eb18e8f974cbfae8bb0f069c3a311024")
  end

  it "does not try and download the file" do
    FileHelper.expects(:download).never
    manager.transform!
  end

  it "generates a fake sha for the upload record" do
    upload = manager.transform!
    expect(upload.sha1).not_to eq(sha1)
    expect(upload.original_sha1).to eq(nil)
    expect(upload.filesize).to eq(object_size)
  end

  it "marks the stub as uploaded" do
    manager.transform!
    expect(external_upload_stub.reload.status).to eq(ExternalUploadStub.statuses[:uploaded])
  end

  it "copies the stubbed upload on S3 to its new destination and deletes it" do
    upload = manager.transform!
    bucket = @fake_s3.bucket(SiteSetting.s3_upload_bucket)
    expect(bucket.find_object(Discourse.store.get_path_for_upload(upload))).to be_present
    expect(bucket.find_object(external_upload_stub.key)).to be_nil
  end
end
# Backups skip Upload promotion entirely (see
# UPLOAD_TYPES_EXCLUDED_FROM_UPLOAD_PROMOTION) and are moved within the
# dedicated backup bucket instead.
context "when the upload type is backup" do
  let(:object_size) { 200.megabytes }
  let(:object_file) { file_from_fixtures("backup_since_v1.6.tar.gz", "backups") }
  let!(:external_upload_stub) do
    Fabricate(
      :attachment_external_upload_stub,
      created_by: user,
      filesize: object_size,
      upload_type: "backup",
      original_filename: "backup_since_v1.6.tar.gz",
      folder_prefix: RailsMultisite::ConnectionManagement.current_db,
    )
  end
  let(:s3_bucket_name) { SiteSetting.s3_backup_bucket }

  it "does not try and download the file" do
    FileHelper.expects(:download).never
    manager.transform!
  end

  it "raises an error when backups are disabled" do
    SiteSetting.enable_backups = false
    expect { manager.transform! }.to raise_error(Discourse::InvalidAccess)
  end

  it "raises an error when backups are local, not s3" do
    SiteSetting.backup_location = BackupLocationSiteSetting::LOCAL
    expect { manager.transform! }.to raise_error(Discourse::InvalidAccess)
  end

  it "does not create an upload record" do
    expect { manager.transform! }.not_to change { Upload.count }
  end

  it "copies the stubbed upload on S3 to its new destination and deletes it" do
    bucket = @fake_s3.bucket(SiteSetting.s3_backup_bucket)
    expect(bucket.find_object(external_upload_stub.key)).to be_present
    manager.transform!
    expect(
      bucket.find_object(
        "#{RailsMultisite::ConnectionManagement.current_db}/backup_since_v1.6.tar.gz",
      ),
    ).to be_present
    expect(bucket.find_object(external_upload_stub.key)).to be_nil
  end
end
end
# Stubs the HTTP GET against the presigned S3 URL (query string stripped,
# any query accepted) so FileHelper.download returns the fixture body
# instead of hitting the network.
def stub_download_object_filehelper
  uri = URI.parse(Discourse.store.signed_url_for_path(external_upload_stub.key))
  url_without_query = uri.to_s.gsub(uri.query, "")
  stub_request(:get, url_without_query).with(query: hash_including({})).to_return(
    status: 200,
    body: object_file.read,
  )
end
# Seeds the fake S3 bucket with an object matching the upload stub's key,
# reported size, and (optional) client-supplied metadata.
def prepare_fake_s3
  @fake_s3 = FakeS3.create
  @fake_s3.bucket(s3_bucket_name).put_object(
    key: external_upload_stub.key,
    size: object_size,
    last_modified: Time.zone.now,
    metadata: external_upload_stub_metadata,
  )
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
##
# Anything that we want to be able to bookmark must be registered as a
# bookmarkable type using Plugin::Instance#register_bookmarkable(bookmarkable_klass),
# where the bookmarkable_klass is a class that implements this BaseBookmarkable
# interface. Some examples are TopicBookmarkable and PostBookmarkable.
#
# These methods are then called by the RegisteredBookmarkable class through a public
# interface, used in places where we need to list, send reminders for,
# or otherwise interact with bookmarks in a way that is unique to the
# bookmarkable type.
#
# See RegisteredBookmarkable for additional documentation.
class BaseBookmarkable
attr_reader :model, :serializer, :preload_associations
# @return [ActiveRecord::Base] The ActiveRecord model class which will be used to denote
# the type of the bookmarkable upon registration along with
# querying.
def self.model
  # Subclasses (e.g. TopicBookmarkable, PostBookmarkable) must override.
  raise NotImplementedError
end

# @return [ApplicationSerializer] The serializer class inheriting from UserBookmarkBaseSerializer
def self.serializer
  raise NotImplementedError
end

# @return [Array] Used for preloading associations on the bookmarks for listing
# purposes. Should be in the same format used for .includes() e.g.
#
# [{ topic: [:topic_users, :tags] }, :user]
def self.preload_associations
  nil
end

# @return [Boolean] Whether this bookmarkable defines any associations to preload.
def self.has_preloads?
  preload_associations.present?
end

##
#
# Implementations can define their own preloading logic here
# @param [Array] bookmarks_of_type The list of bookmarks to preload data for. Already filtered to be of the correct class.
# @param [Guardian] guardian An instance of Guardian for the current_user
# @return [nil]
def self.perform_custom_preload!(bookmarks_of_type, guardian)
  nil
end

##
# This is where the main query to filter the bookmarks by the provided bookmarkable
# type should occur. This should join on additional tables that are required later
# on to preload additional data for serializers, and also is the place where the
# bookmarks should be filtered based on security checks, which is why the Guardian
# instance is provided.
#
# @param [User] user The user to perform the query for, this scopes the bookmarks returned.
# @param [Guardian] guardian An instance of Guardian for the user to be used for security filters.
# @return [Bookmark::ActiveRecord_AssociationRelation] Should be an appropriately scoped list of bookmarks for the user.
def self.list_query(user, guardian)
  raise NotImplementedError
end

##
# Called from BookmarkQuery when the initial results have been returned by
# perform_list_query. The search_query should join additional tables required
# to filter the bookmarks further, as well as defining a string used for
# where_sql, which can include comparisons with the :q parameter.
#
# @param [Bookmark::ActiveRecord_Relation] bookmarks The bookmark records returned by perform_list_query
# @param [String] query The search query from the user surrounded by the %% wildcards
# @param [String] ts_query The postgres TSQUERY string used for comparisons with full text search columns
# @param [Block] bookmarkable_search This block _must_ be called with the additional WHERE clause SQL relevant
# for the bookmarkable to be searched, as well as the bookmarks relation
# with any additional joins applied.
# @return [Bookmark::ActiveRecord_AssociationRelation] The list of bookmarks from perform_list_query filtered further by
# the query parameter.
def self.search_query(bookmarks, query, ts_query, &bookmarkable_search)
  raise NotImplementedError
end
##
# When sending bookmark reminders, we want to make sure that whatever we
# are sending the reminder for has not been deleted or is otherwise inaccessible.
# Most of the time we can just check if the bookmarkable record is present
# because it will be trashable, though in some cases there will be additional
# conditions in the form of a lambda that we should use instead.
#
# The logic around whether it is the right time to send a reminder does not belong
# here, that is done in the BookmarkReminderNotifications job.
#
# @param [Bookmark] bookmark The bookmark that we are considering sending a reminder for.
# @return [Boolean]
def self.reminder_conditions(bookmark)
raise NotImplementedError
end
##
# Different bookmarkables may have different ways of notifying a user or presenting
# the reminder and what it is for, so it is up to the bookmarkable to register
# its preferred method of sending the reminder.
#
# @param [Bookmark] bookmark The bookmark that we are sending the reminder notification for.
# @return [void]
def self.reminder_handler(bookmark)
raise NotImplementedError
end
##
# Can be used by the inheriting class via reminder_handler, most of the
# time we just want to make a Notification for a bookmark reminder, this
# gives consumers a way to do it without having provide all of the required
# data themselves.
#
# @param [Bookmark] bookmark The bookmark that we are sending the reminder notification for.
# @param [Hash] notification_data Any data, either top-level (e.g. topic_id, post_number) or inside
# the data sub-key, which should be stored when the notification is
# created.
# @return [void]
def self.send_reminder_notification(bookmark, notification_data)
if notification_data[:data].blank? || notification_data[:data][:bookmarkable_url].blank? ||
notification_data[:data][:title].blank?
raise Discourse::InvalidParameters.new(
"A `data` key must be present with at least `bookmarkable_url` and `title` entries.",
)
end
notification_data[:data] = notification_data[:data].merge(
display_username: bookmark.user.username,
bookmark_name: bookmark.name,
bookmark_id: bookmark.id,
).to_json
notification_data[:notification_type] = Notification.types[:bookmark_reminder]
bookmark.user.notifications.create!(notification_data)
end
##
# Access control is dependent on what has been bookmarked, the appropriate guardian
# can_see_X? method should be called from the bookmarkable class to determine
# whether the bookmarkable record (e.g. Post, Topic) is accessible by the guardian user.
#
# @param [Guardian] guardian The guardian class for the user that we are performing the access check for.
# @param [Bookmark] bookmark The bookmark which we are checking access for using the bookmarkable association.
# @return [Boolean]
def self.can_see?(guardian, bookmark)
raise NotImplementedError
end
##
# Some additional information about the bookmark or the surrounding relations
# may be required when the bookmark is created or destroyed. For example, when
# destroying a bookmark within a topic we need to know whether there are other
# bookmarks still remaining in the topic.
#
# @param [Bookmark] bookmark The bookmark that we are retrieving additional metadata for.
# @param [User] user The current user which is accessing the bookmark metadata.
# @return [Hash] (optional)
def self.bookmark_metadata(bookmark, user)
{}
end
##
# Optional bookmarkable specific validations may need to be run before a bookmark is created
# via the BookmarkManager. From here an error should be raised if there is an issue
# with the bookmarkable.
#
# @param [Guardian] guardian The guardian for the user which is creating the bookmark.
# @param [Model] bookmarkable The ActiveRecord model which is acting as the bookmarkable for the new bookmark.
def self.validate_before_create(guardian, bookmarkable)
# noop
end
##
# Optional additional actions may need to occur after a bookmark is created
# via the BookmarkManager.
#
# @param [Guardian] guardian The guardian for the user which is creating the bookmark.
# @param [Model] bookmark The bookmark which was created.
# @param [Hash] opts Additional options that may be passed down via BookmarkManager.
def self.after_create(guardian, bookmark, opts)
# noop
end
##
# Optional additional actions may need to occur after a bookmark is destroyed
# via the BookmarkManager.
#
# @param [Guardian] guardian The guardian for the user which is destroying the bookmark.
# @param [Model] bookmark The bookmark which was destroyed.
# @param [Hash] opts Additional options that may be passed down via BookmarkManager.
def self.after_destroy(guardian, bookmark, opts)
# noop
end
##
# Some bookmarkable records are Trashable, and as such we don't delete the
# bookmark with dependent_destroy. This should be used to delete those records
# after a grace period, defined by the bookmarkable. For example, post bookmarks
# may be deleted 3 days after the post or topic is deleted.
#
# In the case of bookmarkable records that are not trashable, and where
# dependent_destroy is not used, this should just delete the bookmarks pointing
# to the record which no longer exists in the database.
def self.cleanup_deleted
# noop
end
end
``` | # frozen_string_literal: true
RSpec.describe BaseBookmarkable do
fab!(:bookmark) { Fabricate(:bookmark, bookmarkable: Fabricate(:post)) }
describe "#send_reminder_notification" do
it "raises an error if the data, data.bookmarkable_url, or data.title values are missing from notification_data" do
expect { BaseBookmarkable.send_reminder_notification(bookmark, {}) }.to raise_error(
Discourse::InvalidParameters,
)
expect { BaseBookmarkable.send_reminder_notification(bookmark, { data: {} }) }.to raise_error(
Discourse::InvalidParameters,
)
expect {
BaseBookmarkable.send_reminder_notification(
bookmark,
{ data: { title: "test", bookmarkable_url: "test" } },
)
}.not_to raise_error
end
it "creates a Notification with the required data from the bookmark" do
BaseBookmarkable.send_reminder_notification(
bookmark,
{
topic_id: bookmark.bookmarkable.topic_id,
post_number: bookmark.bookmarkable.post_number,
data: {
title: bookmark.bookmarkable.topic.title,
bookmarkable_url: bookmark.bookmarkable.url,
},
},
)
notif = bookmark.user.notifications.last
expect(notif.notification_type).to eq(Notification.types[:bookmark_reminder])
expect(notif.topic_id).to eq(bookmark.bookmarkable.topic_id)
expect(notif.post_number).to eq(bookmark.bookmarkable.post_number)
expect(notif.data).to eq(
{
title: bookmark.bookmarkable.topic.title,
bookmarkable_url: bookmark.bookmarkable.url,
display_username: bookmark.user.username,
bookmark_name: bookmark.name,
bookmark_id: bookmark.id,
}.to_json,
)
end
it "does not allow the consumer to override display_username, bookmark_name, or bookmark_id" do
BaseBookmarkable.send_reminder_notification(
bookmark,
{
topic_id: bookmark.bookmarkable.topic_id,
post_number: bookmark.bookmarkable.post_number,
data: {
title: bookmark.bookmarkable.topic.title,
bookmarkable_url: bookmark.bookmarkable.url,
display_username: "bad username",
bookmark_name: "bad name",
bookmark_id: -89_854,
},
},
)
notif = bookmark.user.notifications.last
data = JSON.parse(notif[:data])
expect(data[:display_username]).not_to eq("bad username")
expect(data[:name]).not_to eq("bad name")
expect(data[:bookmark_id]).not_to eq(-89_854)
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Used as a data source via HashtagAutocompleteService to provide category
# results when looking up a category slug via markdown or searching for
# categories via the # autocomplete character.
class CategoryHashtagDataSource
def self.enabled?
true
end
def self.icon
"folder"
end
def self.type
"category"
end
def self.category_to_hashtag_item(category)
HashtagAutocompleteService::HashtagItem.new.tap do |item|
item.text = category.name
item.slug = category.slug
item.description = category.description_text
item.icon = icon
item.relative_url = category.url
item.id = category.id
# Single-level category hierarchy should be enough to distinguish between
# categories here.
item.ref = category.slug_ref
end
end
def self.lookup(guardian, slugs)
user_categories =
Category
.secured(guardian)
.includes(:parent_category)
.order("parent_category_id ASC NULLS FIRST, id ASC")
Category
.query_loaded_from_slugs(slugs, user_categories)
.map { |category| category_to_hashtag_item(category) }
end
def self.search(
guardian,
term,
limit,
condition = HashtagAutocompleteService.search_conditions[:contains]
)
base_search =
Category
.secured(guardian)
.select(:id, :parent_category_id, :slug, :name, :description)
.includes(:parent_category)
if condition == HashtagAutocompleteService.search_conditions[:starts_with]
base_search = base_search.where("LOWER(slug) LIKE :term", term: "#{term}%")
elsif condition == HashtagAutocompleteService.search_conditions[:contains]
base_search =
base_search.where("LOWER(name) LIKE :term OR LOWER(slug) LIKE :term", term: "%#{term}%")
else
raise Discourse::InvalidParameters.new("Unknown search condition: #{condition}")
end
base_search.take(limit).map { |category| category_to_hashtag_item(category) }
end
def self.search_sort(search_results, term)
if term.present?
search_results.sort_by { |item| [item.slug == term ? 0 : 1, item.text.downcase] }
else
search_results.sort_by { |item| item.text.downcase }
end
end
def self.search_without_term(guardian, limit)
Category
.includes(:parent_category)
.secured(guardian)
.where(
"categories.id NOT IN (#{
CategoryUser
.muted_category_ids_query(guardian.user, include_direct: true)
.select("categories.id")
.to_sql
})",
)
.order(topic_count: :desc)
.take(limit)
.map { |category| category_to_hashtag_item(category) }
end
end
``` | # frozen_string_literal: true
RSpec.describe CategoryHashtagDataSource do
fab!(:parent_category) { Fabricate(:category, slug: "fun", topic_count: 2) }
fab!(:category1) do
Fabricate(:category, slug: "random", topic_count: 12, parent_category: parent_category)
end
fab!(:category2) { Fabricate(:category, name: "Book Section", slug: "books", topic_count: 566) }
fab!(:category3) { Fabricate(:category, slug: "movies", topic_count: 245) }
fab!(:group)
fab!(:category4) { Fabricate(:private_category, slug: "secret", group: group, topic_count: 40) }
fab!(:category5) { Fabricate(:category, slug: "casual", topic_count: 99) }
fab!(:user)
let(:guardian) { Guardian.new(user) }
let(:uncategorized_category) { Category.find(SiteSetting.uncategorized_category_id) }
describe "#lookup" do
it "finds categories using their slug, downcasing for matches" do
result = described_class.lookup(guardian, ["movies"]).first
expect(result.ref).to eq("movies")
expect(result.slug).to eq("movies")
result = described_class.lookup(guardian, ["BoOKs"]).first
expect(result.ref).to eq("books")
expect(result.slug).to eq("books")
end
it "finds categories using the parent:child slug format" do
result = described_class.lookup(guardian, ["fun:random"]).first
expect(result.ref).to eq("fun:random")
expect(result.slug).to eq("random")
end
it "does not find child categories by their standalone slug" do
expect(described_class.lookup(guardian, ["random"]).first).to eq(nil)
end
it "does not find categories the user cannot access" do
expect(described_class.lookup(guardian, ["secret"]).first).to eq(nil)
group.add(user)
expect(described_class.lookup(Guardian.new(user), ["secret"]).first).not_to eq(nil)
end
context "with sub-sub-categories" do
before { SiteSetting.max_category_nesting = 3 }
it "returns the first matching grandchild category (ordered by IDs) when there are multiple categories with the same slug" do
parent1 = Fabricate(:category, slug: "parent1")
parent2 = Fabricate(:category, slug: "parent2")
parent1_child = Fabricate(:category, slug: "child", parent_category_id: parent1.id)
parent1_child_grandchild =
Fabricate(:category, slug: "grandchild", parent_category_id: parent1_child.id)
parent2_child = Fabricate(:category, slug: "child", parent_category_id: parent2.id)
parent2_child_grandchild =
Fabricate(:category, slug: "grandchild", parent_category_id: parent2_child.id)
result = described_class.lookup(guardian, ["child:grandchild"])
expect(result.map(&:relative_url)).to eq([parent1_child_grandchild.url])
parent1_child.destroy
parent1_child = Fabricate(:category, slug: "child", parent_category_id: parent1.id)
result = described_class.lookup(guardian, ["child:grandchild"])
expect(result.map(&:relative_url)).to eq([parent2_child_grandchild.url])
end
it "returns the correct grandchild category when there are multiple children with the same slug and only one of them has the correct grandchild" do
parent1 = Fabricate(:category, slug: "parent1")
parent1_child = Fabricate(:category, slug: "child", parent_category_id: parent1.id)
parent1_child_grandchild =
Fabricate(:category, slug: "another-grandchild", parent_category_id: parent1_child.id)
parent2 = Fabricate(:category, slug: "parent2")
parent2_child = Fabricate(:category, slug: "child", parent_category_id: parent2.id)
parent2_child_grandchild =
Fabricate(:category, slug: "grandchild", parent_category_id: parent2_child.id)
result = described_class.lookup(guardian, ["child:grandchild"])
expect(result.map(&:relative_url)).to eq([parent2_child_grandchild.url])
end
end
end
describe "#search" do
it "finds categories by partial name" do
result = described_class.search(guardian, "mov", 5).first
expect(result.ref).to eq("movies")
expect(result.slug).to eq("movies")
end
it "finds categories by partial slug" do
result = described_class.search(guardian, "ook sec", 5).first
expect(result.ref).to eq("books")
expect(result.slug).to eq("books")
end
it "does not find categories the user cannot access" do
expect(described_class.search(guardian, "secret", 5).first).to eq(nil)
group.add(user)
expect(described_class.search(Guardian.new(user), "secret", 5).first).not_to eq(nil)
end
it "uses the correct ref format for a parent:child category that is found" do
result = described_class.search(guardian, "random", 5).first
expect(result.ref).to eq("fun:random")
expect(result.slug).to eq("random")
end
end
describe "#search_without_term" do
it "returns distinct categories ordered by topic_count" do
expect(described_class.search_without_term(guardian, 5).map(&:slug)).to eq(
%w[books movies casual random fun],
)
end
it "does not return categories the user does not have permission to view" do
expect(described_class.search_without_term(guardian, 5).map(&:slug)).not_to include("secret")
group.add(user)
expect(described_class.search_without_term(Guardian.new(user), 5).map(&:slug)).to include(
"secret",
)
end
it "does not return categories the user has muted" do
CategoryUser.create!(
user: user,
category: category1,
notification_level: CategoryUser.notification_levels[:muted],
)
expect(described_class.search_without_term(guardian, 5).map(&:slug)).not_to include("random")
end
it "does not return child categories where the user has muted the parent" do
CategoryUser.create!(
user: user,
category: parent_category,
notification_level: CategoryUser.notification_levels[:muted],
)
expect(described_class.search_without_term(guardian, 5).map(&:slug)).not_to include("random")
end
end
describe "#search_sort" do
it "orders by exact slug match then text" do
results_to_sort = [
HashtagAutocompleteService::HashtagItem.new(
text: "System Tests",
slug: "system-test-development",
),
HashtagAutocompleteService::HashtagItem.new(text: "Ruby Dev", slug: "ruby-dev"),
HashtagAutocompleteService::HashtagItem.new(text: "Dev", slug: "dev"),
HashtagAutocompleteService::HashtagItem.new(text: "Dev Tools", slug: "dev-tools"),
HashtagAutocompleteService::HashtagItem.new(text: "Dev Lore", slug: "dev-lore"),
]
expect(described_class.search_sort(results_to_sort, "dev").map(&:slug)).to eq(
%w[dev dev-lore dev-tools ruby-dev system-test-development],
)
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
class UserAnonymizer
attr_reader :user_history
EMAIL_SUFFIX = "@anonymized.invalid"
# opts:
# anonymize_ip - an optional new IP to update their logs with
def initialize(user, actor = nil, opts = nil)
@user = user
@actor = actor
@user_history = nil
@opts = opts || {}
end
def self.make_anonymous(user, actor = nil, opts = nil)
self.new(user, actor, opts).make_anonymous
end
def make_anonymous
User.transaction do
@prev_email = @user.email
@prev_username = @user.username
unless UsernameChanger.new(@user, make_anon_username).change(run_update_job: false)
raise "Failed to change username"
end
@user.reload
@user.password = SecureRandom.hex
@user.name = SiteSetting.full_name_required ? @user.username : nil
@user.date_of_birth = nil
@user.title = nil
@user.uploaded_avatar_id = nil
if @opts.has_key?(:anonymize_ip)
@user.ip_address = @opts[:anonymize_ip]
@user.registration_ip_address = @opts[:anonymize_ip]
end
@user.save!
@user.primary_email.update_attribute(:email, "#{@user.username}#{EMAIL_SUFFIX}")
options = @user.user_option
options.mailing_list_mode = false
options.email_digests = false
options.email_level = UserOption.email_level_types[:never]
options.email_messages_level = UserOption.email_level_types[:never]
options.save!
if profile = @user.user_profile
profile.update!(
location: nil,
website: nil,
bio_raw: nil,
bio_cooked: nil,
profile_background_upload: nil,
card_background_upload: nil,
)
end
@user.clear_status!
@user.user_avatar&.destroy!
@user.single_sign_on_record&.destroy!
@user.oauth2_user_infos.destroy_all
@user.user_associated_accounts.destroy_all
@user.api_keys.destroy_all
@user.user_api_keys.destroy_all
@user.user_emails.secondary.destroy_all
@user_history = log_action
end
UsernameChanger.update_username(
user_id: @user.id,
old_username: @prev_username,
new_username: @user.username,
avatar_template: @user.avatar_template,
)
Jobs.enqueue(
:anonymize_user,
user_id: @user.id,
prev_email: @prev_email,
anonymize_ip: @opts[:anonymize_ip],
)
DiscourseEvent.trigger(:user_anonymized, user: @user, opts: @opts)
@user
end
private
def make_anon_username
100.times do
new_username = "anon#{(SecureRandom.random_number * 100_000_000).to_i}"
return new_username unless User.where(username_lower: new_username).exists?
end
raise "Failed to generate an anon username"
end
def log_action
history_details = {
action: UserHistory.actions[:anonymize_user],
target_user_id: @user.id,
acting_user_id: @actor ? @actor.id : @user.id,
}
if SiteSetting.log_anonymizer_details?
history_details[:email] = @prev_email
history_details[:details] = "username: #{@prev_username}"
end
UserHistory.create!(history_details)
end
end
``` | # frozen_string_literal: true
RSpec.describe UserAnonymizer do
let(:admin) { Fabricate(:admin) }
describe "event" do
subject(:make_anonymous) do
described_class.make_anonymous(user, admin, anonymize_ip: "2.2.2.2")
end
let(:user) { Fabricate(:user, username: "edward") }
it "triggers the event" do
events = DiscourseEvent.track_events { make_anonymous }
anon_event = events.detect { |e| e[:event_name] == :user_anonymized }
expect(anon_event).to be_present
params_hash = anon_event[:params][0]
expect(params_hash[:user]).to eq(user)
expect(params_hash[:opts][:anonymize_ip]).to eq("2.2.2.2")
end
end
describe ".make_anonymous" do
subject(:make_anonymous) { described_class.make_anonymous(user, admin) }
let(:original_email) { "[email protected]" }
let(:user) { Fabricate(:user, username: "edward", email: original_email) }
fab!(:another_user) { Fabricate(:evil_trout) }
it "changes username" do
make_anonymous
expect(user.reload.username).to match(/^anon\d{3,}$/)
end
it "changes the primary email address" do
make_anonymous
expect(user.reload.email).to eq("#{user.username}@anonymized.invalid")
end
it "changes the primary email address when there is an email domain allowlist" do
SiteSetting.allowed_email_domains = "example.net|wayne.com|discourse.org"
make_anonymous
expect(user.reload.email).to eq("#{user.username}@anonymized.invalid")
end
it "deletes secondary email addresses" do
Fabricate(:secondary_email, user: user, email: "[email protected]")
make_anonymous
expect(user.reload.secondary_emails).to be_blank
end
it "turns off all notifications" do
user.user_option.update_columns(
email_level: UserOption.email_level_types[:always],
email_messages_level: UserOption.email_level_types[:always],
)
make_anonymous
user.reload
expect(user.user_option.email_digests).to eq(false)
expect(user.user_option.email_level).to eq(UserOption.email_level_types[:never])
expect(user.user_option.email_messages_level).to eq(UserOption.email_level_types[:never])
expect(user.user_option.mailing_list_mode).to eq(false)
end
context "when Site Settings do not require full name" do
before { SiteSetting.full_name_required = false }
it "resets profile to default values" do
user.update!(name: "Bibi", date_of_birth: 19.years.ago, title: "Super Star")
profile = user.reload.user_profile
upload = Fabricate(:upload)
profile.update!(
location: "Moose Jaw",
website: "http://www.bim.com",
bio_raw: "I'm Bibi from Moosejaw. I sing and dance.",
bio_cooked: "I'm Bibi from Moosejaw. I sing and dance.",
profile_background_upload: upload,
bio_cooked_version: 2,
card_background_upload: upload,
)
prev_username = user.username
UserAuthToken.generate!(user_id: user.id)
make_anonymous
user.reload
expect(user.username).not_to eq(prev_username)
expect(user.name).not_to be_present
expect(user.date_of_birth).to eq(nil)
expect(user.title).not_to be_present
expect(user.user_auth_tokens.count).to eq(0)
profile = user.reload.user_profile
expect(profile.location).to eq(nil)
expect(profile.website).to eq(nil)
expect(profile.bio_cooked).to eq(nil)
expect(profile.profile_background_upload).to eq(nil)
expect(profile.bio_cooked_version).to eq(UserProfile::BAKED_VERSION)
expect(profile.card_background_upload).to eq(nil)
end
end
it "clears existing user status" do
user_status = Fabricate(:user_status, user: user)
expect do
make_anonymous
user.reload
end.to change { user.user_status }.from(user_status).to(nil)
end
context "when Site Settings require full name" do
before { SiteSetting.full_name_required = true }
it "changes name to anonymized username" do
prev_username = user.username
user.update(name: "Bibi", date_of_birth: 19.years.ago, title: "Super Star")
make_anonymous
user.reload
expect(user.name).not_to eq(prev_username)
expect(user.name).to eq(user.username)
end
end
it "removes the avatar" do
upload = Fabricate(:upload, user: user)
user.user_avatar = UserAvatar.new(user_id: user.id, custom_upload_id: upload.id)
user.uploaded_avatar_id = upload.id # chosen in user preferences
user.save!
make_anonymous
user.reload
expect(user.user_avatar).to eq(nil)
expect(user.uploaded_avatar_id).to eq(nil)
end
it "updates the avatar in posts" do
Jobs.run_immediately!
upload = Fabricate(:upload, user: user)
user.user_avatar = UserAvatar.new(user_id: user.id, custom_upload_id: upload.id)
user.uploaded_avatar_id = upload.id # chosen in user preferences
user.save!
topic = Fabricate(:topic, user: user)
quoted_post = create_post(user: user, topic: topic, post_number: 1, raw: "quoted post")
stub_image_size
post = create_post(raw: <<~RAW)
Lorem ipsum
[quote="#{quoted_post.username}, post:1, topic:#{quoted_post.topic.id}"]
quoted post
[/quote]
RAW
old_avatar_url = user.avatar_template.gsub("{size}", "48")
expect(post.cooked).to include(old_avatar_url)
make_anonymous
post.reload
new_avatar_url = user.reload.avatar_template.gsub("{size}", "48")
expect(post.cooked).to_not include(old_avatar_url)
expect(post.cooked).to include(new_avatar_url)
end
it "logs the action with the original details" do
SiteSetting.log_anonymizer_details = true
helper = UserAnonymizer.new(user, admin)
orig_email = user.email
orig_username = user.username
helper.make_anonymous
history = helper.user_history
expect(history).to be_present
expect(history.email).to eq(orig_email)
expect(history.details).to match(orig_username)
end
it "logs the action without the original details" do
SiteSetting.log_anonymizer_details = false
helper = UserAnonymizer.new(user, admin)
orig_email = user.email
orig_username = user.username
helper.make_anonymous
history = helper.user_history
expect(history).to be_present
expect(history.email).not_to eq(orig_email)
expect(history.details).not_to match(orig_username)
end
it "removes external auth associations" do
user.user_associated_accounts = [
UserAssociatedAccount.create(
user_id: user.id,
provider_uid: "example",
provider_name: "facebook",
),
]
user.single_sign_on_record =
SingleSignOnRecord.create(
user_id: user.id,
external_id: "example",
last_payload: "looks good",
)
make_anonymous
user.reload
expect(user.user_associated_accounts).to be_empty
expect(user.single_sign_on_record).to eq(nil)
end
it "removes api key" do
ApiKey.create!(user_id: user.id)
expect { make_anonymous }.to change { ApiKey.count }.by(-1)
user.reload
expect(user.api_keys).to be_empty
end
it "removes user api key" do
user_api_key = Fabricate(:user_api_key, user: user)
expect { make_anonymous }.to change { UserApiKey.count }.by(-1)
user.reload
expect(user.user_api_keys).to be_empty
end
context "when executing jobs" do
before { Jobs.run_immediately! }
it "removes invites" do
Fabricate(:invited_user, invite: Fabricate(:invite), user: user)
Fabricate(:invited_user, invite: Fabricate(:invite), user: another_user)
expect { make_anonymous }.to change { InvitedUser.count }.by(-1)
expect(InvitedUser.where(user_id: user.id).count).to eq(0)
end
it "removes email tokens" do
Fabricate(:email_token, user: user)
Fabricate(:email_token, user: another_user)
expect { make_anonymous }.to change { EmailToken.count }.by(-1)
expect(EmailToken.where(user_id: user.id).count).to eq(0)
end
it "removes email log entries" do
Fabricate(:email_log, user: user)
Fabricate(:email_log, user: another_user)
expect { make_anonymous }.to change { EmailLog.count }.by(-1)
expect(EmailLog.where(user_id: user.id).count).to eq(0)
end
it "removes incoming emails" do
Fabricate(:incoming_email, user: user, from_address: user.email)
Fabricate(:incoming_email, from_address: user.email, error: "Some error")
Fabricate(:incoming_email, user: another_user, from_address: another_user.email)
expect { make_anonymous }.to change { IncomingEmail.count }.by(-2)
expect(IncomingEmail.where(user_id: user.id).count).to eq(0)
expect(IncomingEmail.where(from_address: original_email).count).to eq(0)
end
it "removes raw email from posts" do
post1 = Fabricate(:post, user: user, via_email: true, raw_email: "raw email from user")
post2 =
Fabricate(
:post,
user: another_user,
via_email: true,
raw_email: "raw email from another user",
)
make_anonymous
expect(post1.reload).to have_attributes(via_email: true, raw_email: nil)
expect(post2.reload).to have_attributes(
via_email: true,
raw_email: "raw email from another user",
)
end
it "does not delete profile views" do
UserProfileView.add(user.id, "127.0.0.1", another_user.id, Time.now, true)
expect { make_anonymous }.to_not change { UserProfileView.count }
end
it "removes user field values" do
field1 = Fabricate(:user_field)
field2 = Fabricate(:user_field)
user.custom_fields = {
some_field: "123",
"user_field_#{field1.id}": "foo",
"user_field_#{field2.id}": "bar",
another_field: "456",
}
expect { make_anonymous }.to change { user.custom_fields }
expect(user.reload.custom_fields).to eq("some_field" => "123", "another_field" => "456")
end
end
end
describe "anonymize_ip" do
let(:old_ip) { "1.2.3.4" }
let(:anon_ip) { "0.0.0.0" }
let(:user) { Fabricate(:user, ip_address: old_ip, registration_ip_address: old_ip) }
fab!(:post)
let(:topic) { post.topic }
it "doesn't anonymize ips by default" do
UserAnonymizer.make_anonymous(user, admin)
expect(user.ip_address).to eq(old_ip)
end
it "is called if you pass an option" do
UserAnonymizer.make_anonymous(user, admin, anonymize_ip: anon_ip)
user.reload
expect(user.ip_address).to eq(anon_ip)
end
it "exhaustively replaces all user ips" do
Jobs.run_immediately!
link = IncomingLink.create!(current_user_id: user.id, ip_address: old_ip, post_id: post.id)
screened_email = ScreenedEmail.create!(email: user.email, ip_address: old_ip)
search_log =
SearchLog.create!(
term: "wat",
search_type: SearchLog.search_types[:header],
user_id: user.id,
ip_address: old_ip,
)
topic_link =
TopicLink.create!(
user_id: admin.id,
topic_id: topic.id,
url: "https://discourse.org",
domain: "discourse.org",
)
topic_link_click =
TopicLinkClick.create!(topic_link_id: topic_link.id, user_id: user.id, ip_address: old_ip)
user_profile_view =
UserProfileView.create!(
user_id: user.id,
user_profile_id: admin.user_profile.id,
ip_address: old_ip,
viewed_at: Time.now,
)
TopicViewItem.create!(
topic_id: topic.id,
user_id: user.id,
ip_address: old_ip,
viewed_at: Time.now,
)
delete_history = StaffActionLogger.new(admin).log_user_deletion(user)
user_history = StaffActionLogger.new(user).log_backup_create
UserAnonymizer.make_anonymous(user, admin, anonymize_ip: anon_ip)
expect(user.registration_ip_address).to eq(anon_ip)
expect(link.reload.ip_address).to eq(anon_ip)
expect(screened_email.reload.ip_address).to eq(anon_ip)
expect(search_log.reload.ip_address).to eq(anon_ip)
expect(topic_link_click.reload.ip_address).to eq(anon_ip)
topic_view = TopicViewItem.where(topic_id: topic.id, user_id: user.id).first
expect(topic_view.ip_address).to eq(anon_ip)
expect(delete_history.reload.ip_address).to eq(anon_ip)
expect(user_history.reload.ip_address).to eq(anon_ip)
expect(user_profile_view.reload.ip_address).to eq(anon_ip)
end
end
describe "anonymize_emails" do
it "destroys all associated invites" do
invite = Fabricate(:invite, email: "[email protected]")
user = invite.redeem
Jobs.run_immediately!
described_class.make_anonymous(user, admin)
expect(user.email).not_to eq("[email protected]")
expect(Invite.exists?(id: invite.id)).to eq(false)
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Chooses and runs the appropriate post-signup activation strategy for a
# user: email confirmation, staff approval, or immediate login.
class UserActivator
  attr_reader :user, :request, :session, :cookies, :message

  def initialize(user, request, session, cookies)
    @user = user
    @session = session
    @cookies = cookies
    @request = request
    @message = nil
  end

  # Hook invoked before activation; the base implementation does nothing.
  def start
  end

  # Runs the selected strategy and memoizes the message it returns.
  def finish
    @message = activator.activate
  end

  # The user-facing message of the selected strategy.
  def success_message
    activator.success_message
  end

  private

  # Instantiates the chosen strategy with the same request context.
  def activator
    factory.new(user, request, session, cookies)
  end

  # Picks the strategy class:
  # - inactive users must confirm their email first
  # - approval-required sites hold the user unless a redeemable invite exists
  # - otherwise the user is logged in straight away
  def factory
    invite = Invite.find_by(email: Email.downcase(user.email))

    return EmailActivator unless user.active?

    if SiteSetting.must_approve_users? && !(invite.present? && invite.redeemable?)
      ApprovalActivator
    else
      LoginActivator
    end
  end
end
# Strategy for sites where staff must approve new accounts: performs no
# side effects and simply reports that the user has to wait.
class ApprovalActivator < UserActivator
  def success_message
    I18n.t("login.wait_approval")
  end

  # Approval happens out of band, so activation only returns the message.
  def activate
    success_message
  end
end
# Strategy for not-yet-active accounts: issues a signup email token and
# enqueues the confirmation email.
class EmailActivator < UserActivator
  def activate
    token = user.email_tokens.create!(email: user.email, scope: EmailToken.scopes[:signup])
    EmailToken.enqueue_signup_email(token)
    success_message
  end

  def success_message
    # The address is interpolated into markup, so HTML-escape it.
    I18n.t("login.activate_email", email: Rack::Utils.escape_html(user.email))
  end
end
# Strategy for active, approved accounts: logs the user in immediately and
# queues their welcome message. `log_on_user` comes from CurrentUser.
class LoginActivator < UserActivator
include CurrentUser
def activate
log_on_user(user)
user.enqueue_welcome_message("welcome_user")
success_message
end
def success_message
I18n.t("login.active")
end
end
``` | # frozen_string_literal: true
RSpec.describe UserActivator do
fab!(:user)
let!(:email_token) { Fabricate(:email_token, user: user) }
describe "email_activator" do
# request/session/cookies are unused by EmailActivator, so nil is fine.
let(:activator) { EmailActivator.new(user, nil, nil, nil) }
it "create email token and enqueues user email" do
# Freeze time so the new token's created_at can be compared exactly.
now = freeze_time
activator.activate
email_token = user.reload.email_tokens.last
expect(email_token.created_at).to eq_time(now)
# The signup email is enqueued as a critical user email whose args must
# reference this user, the signup template, and a token that hashes to
# the freshly created email token.
job_args = Jobs::CriticalUserEmail.jobs.last["args"].first
expect(job_args["user_id"]).to eq(user.id)
expect(job_args["type"]).to eq("signup")
expect(EmailToken.hash_token(job_args["email_token"])).to eq(email_token.token_hash)
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Sends Web Push notifications to a user's registered browser subscriptions
# and manages subscription lifecycle: confirmation on subscribe, error
# tracking on delivery failure, and destruction of dead subscriptions.
class PushNotificationPusher
TOKEN_VALID_FOR_SECONDS ||= 5 * 60
CONNECTION_TIMEOUT_SECONDS = 5
# Builds the localized push payload for `user` and sends it to every one
# of their push subscriptions. Returns the message hash that was sent.
def self.push(user, payload)
message = nil
I18n.with_locale(user.effective_locale) do
notification_icon_name = Notification.types[payload[:notification_type]]
# Fall back to the generic icon when no per-type icon asset exists.
if !File.exist?(
File.expand_path(
"../../app/assets/images/push-notifications/#{notification_icon_name}.png",
__dir__,
),
)
notification_icon_name = "discourse"
end
notification_icon =
ActionController::Base.helpers.image_url("push-notifications/#{notification_icon_name}.png")
message = {
title: payload[:translated_title] || title(payload),
body: payload[:excerpt],
badge: get_badge,
icon: notification_icon,
tag: payload[:tag] || "#{Discourse.current_hostname}-#{payload[:topic_id]}",
base_url: Discourse.base_url,
url: payload[:post_url],
hide_when_active: true,
}
subscriptions(user).each { |subscription| send_notification(user, subscription, message) }
end
message
end
# Resolves the translated popup title for the payload's notification type.
def self.title(payload)
translation_key =
case payload[:notification_type]
when Notification.types[:watching_category_or_tag]
# For watching_category_or_tag, the notification could be for either a new post or new topic.
# Instead of duplicating translations, we can rely on 'watching_first_post' for new topics,
# and 'posted' for new posts.
type = payload[:post_number] == 1 ? "watching_first_post" : "posted"
"discourse_push_notifications.popup.#{type}"
else
"discourse_push_notifications.popup.#{Notification.types[payload[:notification_type]]}"
end
I18n.t(
translation_key,
site_title: SiteSetting.title,
topic: payload[:topic_title],
username: payload[:username],
)
end
def self.subscriptions(user)
user.push_subscriptions
end
def self.clear_subscriptions(user)
user.push_subscriptions.clear
end
# Registers (or de-duplicates) a push subscription for `user`. When
# `send_confirmation` is the string "true", pushes a confirmation
# notification to the newly created subscription.
def self.subscribe(user, push_params, send_confirmation)
data = push_params.to_json
subscriptions = PushSubscription.where(user: user, data: data)
subscriptions_count = subscriptions.count
new_subscription =
if subscriptions_count > 1
# Collapse duplicate rows down to a single fresh subscription.
subscriptions.destroy_all
PushSubscription.create!(user: user, data: data)
elsif subscriptions_count == 0
PushSubscription.create!(user: user, data: data)
end
if send_confirmation == "true"
message = {
title:
I18n.t("discourse_push_notifications.popup.confirm_title", site_title: SiteSetting.title),
body: I18n.t("discourse_push_notifications.popup.confirm_body"),
icon: ActionController::Base.helpers.image_url("push-notifications/check.png"),
badge: get_badge,
tag: "#{Discourse.current_hostname}-subscription",
}
send_notification(user, new_subscription, message)
end
end
def self.unsubscribe(user, subscription)
PushSubscription.find_by(user: user, data: subscription.to_json)&.destroy!
end
# Badge image URL: the site-configured icon when present, otherwise the
# bundled default.
def self.get_badge
if (url = SiteSetting.site_push_notifications_icon_url).present?
url
else
ActionController::Base.helpers.image_url("push-notifications/discourse.png")
end
end
MAX_ERRORS ||= 3
MIN_ERROR_DURATION ||= 86_400 # 1 day
# Records a delivery failure on the subscription; destroys it once it has
# failed MAX_ERRORS times over a span longer than MIN_ERROR_DURATION
# seconds, and always logs a warning with the failure context.
def self.handle_generic_error(subscription, error, user, endpoint, message)
subscription.error_count += 1
subscription.first_error_at ||= Time.zone.now
delta = Time.zone.now - subscription.first_error_at
if subscription.error_count >= MAX_ERRORS && delta > MIN_ERROR_DURATION
subscription.destroy!
else
subscription.save!
end
Discourse.warn_exception(
error,
message: "Failed to send push notification",
env: {
user_id: user.id,
endpoint: endpoint,
message: message.to_json,
},
)
end
# Delivers one message to one subscription via WebPush. Structurally
# invalid or permanently rejected subscriptions are destroyed; transient
# failures go through handle_generic_error; a successful send resets any
# accumulated error state.
def self.send_notification(user, subscription, message)
parsed_data = subscription.parsed_data
endpoint = parsed_data["endpoint"]
p256dh = parsed_data.dig("keys", "p256dh")
auth = parsed_data.dig("keys", "auth")
# A subscription missing any of these fields can never be delivered to.
if (endpoint.blank? || p256dh.blank? || auth.blank?)
subscription.destroy!
return
end
begin
WebPush.payload_send(
endpoint: endpoint,
message: message.to_json,
p256dh: p256dh,
auth: auth,
vapid: {
subject: Discourse.base_url,
public_key: SiteSetting.vapid_public_key,
private_key: SiteSetting.vapid_private_key,
expiration: TOKEN_VALID_FOR_SECONDS,
},
open_timeout: CONNECTION_TIMEOUT_SECONDS,
read_timeout: CONNECTION_TIMEOUT_SECONDS,
ssl_timeout: CONNECTION_TIMEOUT_SECONDS,
)
# Successful delivery clears previously recorded failures.
if subscription.first_error_at || subscription.error_count != 0
subscription.update_columns(error_count: 0, first_error_at: nil)
end
rescue WebPush::ExpiredSubscription
subscription.destroy!
rescue WebPush::ResponseError => e
if e.response.message == "MismatchSenderId"
subscription.destroy!
else
handle_generic_error(subscription, e, user, endpoint, message)
end
rescue Timeout::Error => e
handle_generic_error(subscription, e, user, endpoint, message)
rescue OpenSSL::SSL::SSLError => e
handle_generic_error(subscription, e, user, endpoint, message)
end
end
private_class_method :send_notification
private_class_method :handle_generic_error
end
``` | # frozen_string_literal: true
RSpec.describe PushNotificationPusher do
it "returns badges url by default" do
expect(PushNotificationPusher.get_badge).to eq("/assets/push-notifications/discourse.png")
end
it "returns custom badges url" do
upload = Fabricate(:upload)
SiteSetting.push_notifications_icon = upload
expect(PushNotificationPusher.get_badge).to eq(UrlHelper.absolute(upload.url))
end
context "with user" do
fab!(:user)
let(:topic_title) { "Topic" }
let(:username) { "system" }
# Minimal structurally-valid subscription payload (endpoint + both keys).
def create_subscription
data = <<~JSON
{
"endpoint": "endpoint",
"keys": {
"p256dh": "p256dh",
"auth": "auth"
}
}
JSON
PushSubscription.create!(user_id: user.id, data: data)
end
# Pushes a canned payload to `user`, returning the message hash built by
# PushNotificationPusher.push.
def execute_push(notification_type: 1, post_number: 1)
PushNotificationPusher.push(
user,
{
topic_title: topic_title,
username: username,
excerpt: "description",
topic_id: 1,
post_url: "https://example.com/t/1/2",
notification_type: notification_type,
post_number: post_number,
},
)
end
it "correctly guesses an image if missing" do
# -1 is not a known notification type, so no icon asset exists for it.
message = execute_push(notification_type: -1)
expect(message[:icon]).to eq("/assets/push-notifications/discourse.png")
end
it "correctly finds image if exists" do
message = execute_push(notification_type: 1)
expect(message[:icon]).to eq("/assets/push-notifications/mentioned.png")
end
it "sends notification in user's locale" do
SiteSetting.allow_user_locale = true
user.update!(locale: "pt_BR")
TranslationOverride.upsert!(
"pt_BR",
"discourse_push_notifications.popup.mentioned",
"pt_BR notification",
)
# Assert on the JSON-encoded message actually handed to WebPush.
WebPush
.expects(:payload_send)
.with { |*args| JSON.parse(args.first[:message])["title"] == "pt_BR notification" }
.once
create_subscription
execute_push
end
it "deletes subscriptions which are erroring regularly" do
start = freeze_time
sub = create_subscription
response = Struct.new(:body, :inspect, :message).new("test", "test", "failed")
error = WebPush::ResponseError.new(response, "localhost")
# Every send fails: 3 within a day (kept), then a 4th after 2 days.
WebPush.expects(:payload_send).raises(error).times(4)
# 3 failures in more than 24 hours
3.times do
execute_push
freeze_time 1.minute.from_now
end
sub.reload
expect(sub.error_count).to eq(3)
expect(sub.first_error_at).to eq_time(start)
# Past MIN_ERROR_DURATION with MAX_ERRORS reached: subscription is culled.
freeze_time(2.days.from_now)
execute_push
expect(PushSubscription.where(id: sub.id).exists?).to eq(false)
end
it "deletes invalid subscriptions during send" do
missing_endpoint =
PushSubscription.create!(
user_id: user.id,
data: { p256dh: "public ECDH key", keys: { auth: "private ECDH key" } }.to_json,
)
missing_p256dh =
PushSubscription.create!(
user_id: user.id,
data: { endpoint: "endpoint 1", keys: { auth: "private ECDH key" } }.to_json,
)
missing_auth =
PushSubscription.create!(
user_id: user.id,
data: { endpoint: "endpoint 2", keys: { p256dh: "public ECDH key" } }.to_json,
)
valid_subscription =
PushSubscription.create!(
user_id: user.id,
data: {
endpoint: "endpoint 3",
keys: {
p256dh: "public ECDH key",
auth: "private ECDH key",
},
}.to_json,
)
expect(PushSubscription.where(user_id: user.id)).to contain_exactly(
missing_endpoint,
missing_p256dh,
missing_auth,
valid_subscription,
)
# Only the complete subscription reaches WebPush; the rest are destroyed.
WebPush
.expects(:payload_send)
.with(
has_entries(endpoint: "endpoint 3", p256dh: "public ECDH key", auth: "private ECDH key"),
)
.once
execute_push
expect(PushSubscription.where(user_id: user.id)).to contain_exactly(valid_subscription)
end
it "handles timeouts" do
WebPush.expects(:payload_send).raises(Net::ReadTimeout.new)
subscription = create_subscription
# A timeout is a transient failure: no raise, error_count incremented.
expect { execute_push }.to_not raise_exception
subscription.reload
expect(subscription.error_count).to eq(1)
end
describe "`watching_category_or_tag` notifications" do
it "Uses the 'watching_first_post' translation when new topic was created" do
message =
execute_push(
notification_type: Notification.types[:watching_category_or_tag],
post_number: 1,
)
expect(message[:title]).to eq(
I18n.t(
"discourse_push_notifications.popup.watching_first_post",
site_title: SiteSetting.title,
topic: topic_title,
username: username,
),
)
end
it "Uses the 'posted' translation when new post was created" do
message =
execute_push(
notification_type: Notification.types[:watching_category_or_tag],
post_number: 2,
)
expect(message[:title]).to eq(
I18n.t(
"discourse_push_notifications.popup.posted",
site_title: SiteSetting.title,
topic: topic_title,
username: username,
),
)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Creates and reuses per-user "anonymous" shadow accounts when anonymous
# posting is enabled.
class AnonymousShadowCreator
attr_reader :user
def self.get_master(user)
new(user).get_master
end
def self.get(user)
new(user).get
end
def initialize(user)
@user = user
end
# Returns the real account behind a shadow user, or nil when no user was
# given or anonymous posting is disabled.
def get_master
return unless user
return unless SiteSetting.allow_anonymous_posting
user.master_user
end
# Returns the user's anonymous shadow account, creating one when needed.
# Returns nil when the feature is off, the user's groups are not allowed
# to post anonymously, or the user is unapproved on a must-approve site.
def get
return unless user
return unless SiteSetting.allow_anonymous_posting
return if !user.in_any_groups?(SiteSetting.anonymous_posting_allowed_groups_map)
return if SiteSetting.must_approve_users? && !user.approved?
shadow = user.shadow_user
# Rotate to a fresh shadow once the current one has posted and its last
# post is older than the configured anonymous account duration.
if shadow && (shadow.post_count + shadow.topic_count) > 0 && shadow.last_posted_at &&
shadow.last_posted_at < SiteSetting.anonymous_account_duration_minutes.minutes.ago
shadow = nil
end
shadow || create_shadow!
end
private
# Builds the shadow account inside a transaction: random credentials,
# trust level locked to 1, pre-approved and backdated a day; email
# notifications disabled; any previously active shadow link for this
# master user is deactivated before the new link is created.
def create_shadow!
username = resolve_username
User.transaction do
shadow =
User.create!(
password: SecureRandom.hex,
email: "#{SecureRandom.hex}@anon.#{Discourse.current_hostname}",
skip_email_validation: true,
name: username, # prevents error when names are required
username: username,
active: true,
trust_level: 1,
manual_locked_trust_level: 1,
approved: true,
approved_at: 1.day.ago,
created_at: 1.day.ago, # bypass new user restrictions
)
shadow.user_option.update_columns(
email_messages_level: UserOption.email_level_types[:never],
email_digests: false,
)
shadow.email_tokens.update_all(confirmed: true)
shadow.activate
AnonymousUser.where(master_user_id: user.id, active: true).update_all(active: false)
AnonymousUser.create!(user_id: shadow.id, master_user_id: user.id, active: true)
shadow.reload
user.reload
shadow
end
end
# Uses the localized "anonymous" string as the username base, falling back
# to the literal "anonymous" when sanitization leaves nothing usable.
def resolve_username
username = I18n.t("anonymous").downcase
username = "anonymous" unless UserNameSuggester.sanitize_username(username).present?
UserNameSuggester.suggest(username)
end
end
``` | # frozen_string_literal: true
RSpec.describe AnonymousShadowCreator do
it "returns no shadow by default" do
# allow_anonymous_posting defaults to off, so no shadow is created.
expect(AnonymousShadowCreator.get(Fabricate.build(:user))).to eq(nil)
end
context "when anonymous posting is enabled" do
fab!(:user) { Fabricate(:user, trust_level: 3) }
before do
SiteSetting.allow_anonymous_posting = true
SiteSetting.anonymous_posting_allowed_groups = "11"
Group.refresh_automatic_groups!
end
it "returns no shadow if the user is not in a group that is allowed to anonymously post" do
user = Fabricate(:user, trust_level: 0)
Group.refresh_automatic_groups!
expect(AnonymousShadowCreator.get(user)).to eq(nil)
end
it "returns no shadow if must_approve_users is true and user is not approved" do
SiteSetting.must_approve_users = true
expect(AnonymousShadowCreator.get(Fabricate.build(:user, approved: false))).to eq(nil)
end
it "returns a new shadow once time expires" do
SiteSetting.anonymous_account_duration_minutes = 1
shadow = AnonymousShadowCreator.get(user)
# Time alone does not rotate the shadow; it must have posted first.
freeze_time 2.minutes.from_now
shadow2 = AnonymousShadowCreator.get(user)
expect(shadow.id).to eq(shadow2.id)
create_post(user: shadow)
user.reload
shadow.reload
# With a post on record and the duration elapsed, a new shadow is made.
freeze_time 4.minutes.from_now
shadow3 = AnonymousShadowCreator.get(user)
expect(shadow3.user_option.email_digests).to eq(false)
expect(shadow3.user_option.email_messages_level).to eq(UserOption.email_level_types[:never])
expect(shadow2.id).not_to eq(shadow3.id)
end
it "returns a shadow for a legit user" do
shadow = AnonymousShadowCreator.get(user)
shadow2 = AnonymousShadowCreator.get(user)
expect(shadow.id).to eq(shadow2.id)
expect(shadow.trust_level).to eq(1)
expect(shadow.username).to eq("anonymous")
expect(shadow.created_at).not_to eq_time(user.created_at)
p = create_post
# The shadow cannot like posts, while the master user can.
expect(Guardian.new(shadow).post_can_act?(p, :like)).to eq(false)
expect(Guardian.new(user).post_can_act?(p, :like)).to eq(true)
expect(user.anonymous?).to eq(false)
expect(shadow.anonymous?).to eq(true)
end
it "works even when names are required" do
SiteSetting.full_name_required = true
expect { AnonymousShadowCreator.get(user) }.to_not raise_error
end
it "works when there is an email allowlist" do
SiteSetting.allowed_email_domains = "wayne.com"
expect { AnonymousShadowCreator.get(user) }.to_not raise_error
end
it "falls back to username 'anonymous' if the translation for 'anonymous' consists entirely of disallowed characters" do
# use russian locale but do not allow russian characters:
I18n.locale = :ru
SiteSetting.unicode_usernames = true
SiteSetting.allowed_unicode_username_characters = "[äöü]"
shadow = AnonymousShadowCreator.get(user)
expect(shadow.username).to eq("anonymous")
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Rewrites upload references in post markdown (raw URLs, <img> tags, BBCode
# [img], markdown images/links/references) into Discourse short-url form.
class InlineUploads
PLACEHOLDER = "__replace__"
PATH_PLACEHOLDER = "__replace_path__"
UPLOAD_REGEXP_PATTERN = "/original/(\\dX/(?:\\h/)*\\h{40}[a-zA-Z0-9.]*)(\\?v=\\d+)?"
private_constant :UPLOAD_REGEXP_PATTERN
# Rewrites all recognized upload references in `markdown` to short URLs /
# short paths. `on_missing` (a callable) is invoked with each link whose
# upload cannot be found. Returns the rewritten markdown.
def self.process(markdown, on_missing: nil)
markdown = markdown.dup
# Markdown reference definitions are rewritten first, parking the upload
# sha1 in a __<sha1>__ placeholder that is resolved at the very end.
match_md_reference(markdown) do |match, src, replacement, index|
if upload = Upload.get_from_url(src)
markdown = markdown.sub(match, replacement.sub!(PATH_PLACEHOLDER, "__#{upload.sha1}__"))
end
end
# Cook the markdown and walk the HTML to learn which upload links are
# actually rendered (valid) vs merely present in text (invalid). The
# order of link_occurrences mirrors the order of raw matches below.
cooked_fragment = Nokogiri::HTML5.fragment(PrettyText.cook(markdown, disable_emojis: true))
link_occurrences = []
cooked_fragment.traverse do |node|
if node.name == "img"
# Do nothing
elsif !(
node.children.count == 1 &&
(node.children[0].name != "img" && node.children[0].children.blank?)
) &&
!(
node.name == "a" && node.children.count > 1 &&
!node_children_names(node).include?("img")
)
next
end
if seen_link = matched_uploads(node).first
if (actual_link = (node.attributes["href"]&.value || node.attributes["src"]&.value))
link_occurrences << { link: actual_link, is_valid: true }
elsif node.name != "p"
link_occurrences << { link: seen_link, is_valid: false }
end
end
end
# Collect every raw-markdown match (with its byte offset) from the
# various syntaxes; offsets let us pair them with link_occurrences.
raw_matches = []
match_bbcode_img(markdown) do |match, src, replacement, index|
raw_matches << [match, src, replacement, index]
end
match_md_inline_img(markdown) do |match, src, replacement, index|
raw_matches << [match, src, replacement, index]
end
match_img(markdown) do |match, src, replacement, index|
raw_matches << [match, src, replacement, index]
end
match_anchor(markdown) do |match, href, replacement, index|
raw_matches << [match, href, replacement, index]
end
regexps = [
%r{(https?://[a-zA-Z0-9\./-]+/#{Discourse.store.upload_path}#{UPLOAD_REGEXP_PATTERN})},
]
if Discourse.store.external?
regexps << /((?:https?:)?#{SiteSetting.Upload.s3_base_url}#{UPLOAD_REGEXP_PATTERN})/
regexps << /(#{SiteSetting.Upload.s3_cdn_url}#{UPLOAD_REGEXP_PATTERN})/
end
regexps.each do |regexp|
# `indexes` de-duplicates matches of the same URL occurrence across the
# several scans below.
indexes = Set.new
# Bare upload URL on its own paragraph: becomes an image (for supported
# image extensions) or a plain short-path link otherwise.
markdown.scan(/(\n{2,}|\A)#{regexp}$/) do |match|
if match[1].present? && match[2].present?
extension = match[2].split(".")[-1].downcase
index = $~.offset(2)[0]
indexes << index
if FileHelper.supported_images.include?(extension)
raw_matches << [match[1], match[1], +"![](#{PLACEHOLDER})", index]
else
raw_matches << [match[1], match[1], +"#{Discourse.base_url}#{PATH_PLACEHOLDER}", index]
end
end
end
# Upload URL at line start followed by whitespace.
markdown.scan(/^#{regexp}(\s)/) do |match|
if match[0].present?
index = $~.offset(0)[0]
next if !indexes.add?(index)
raw_matches << [match[0], match[0], +"#{Discourse.base_url}#{PATH_PLACEHOLDER}", index]
end
end
# Reference definitions were already handled; record their offsets so
# the generic scan below skips them.
markdown.scan(/\[[^\[\]]*\]: #{regexp}/) do |match|
indexes.add($~.offset(1)[0]) if match[0].present?
end
# Upload URL embedded mid-text after whitespace/closing punctuation.
markdown.scan(/(([\n\s\)\]\<])+)#{regexp}/) do |match|
if matched_uploads(match[2]).present?
next if !indexes.add?($~.offset(3)[0])
index = $~.offset(0)[0]
raw_matches << [match[2], match[2], +"#{Discourse.base_url}#{PATH_PLACEHOLDER}", index]
end
end
end
# Replay matches in document order, consuming link_occurrences in step;
# only links the cooked HTML deemed valid are rewritten.
raw_matches
.sort { |a, b| a[3] <=> b[3] }
.each do |match, link, replace_with, _index|
node_info = link_occurrences.shift
next unless node_info&.dig(:is_valid)
if link.include?(node_info[:link])
begin
uri = URI(link)
rescue URI::Error
end
if !Discourse.store.external?
# For local stores only rewrite links pointing at this site or
# its CDN host.
host = uri&.host
hosts = [Discourse.current_hostname]
if cdn_url = GlobalSetting.cdn_url
hosts << URI(GlobalSetting.cdn_url).hostname
end
next if host && !hosts.include?(host)
end
upload = Upload.get_from_url(link)
if upload
replace_with.sub!(PLACEHOLDER, upload.short_url)
replace_with.sub!(PATH_PLACEHOLDER, upload.short_path)
markdown.sub!(match, replace_with)
else
on_missing.call(link) if on_missing
end
end
end
# Resolve the __<sha1>__ placeholders parked by the reference pass.
markdown.scan(/(__(\h{40})__)/) do |match|
upload = Upload.find_by(sha1: match[1])
markdown = markdown.sub(match[0], upload.short_path)
end
markdown
end
# Yields (match, src, replacement, offset) for each markdown inline image
# or link whose target is an upload (or any target when external_src).
def self.match_md_inline_img(markdown, external_src: false)
markdown.scan(/(!?\[([^\[\]]*)\]\(([^\s\)]+)([ ]*['"]{1}[^\)]*['"]{1}[ ]*)?\))/) do |match|
if (external_src || matched_uploads(match[2]).present?) && block_given?
yield(
match[0],
match[2],
+"#{match[0].start_with?("!") ? "!" : ""}[#{match[1]}](#{PLACEHOLDER}#{match[3]})",
$~.offset(0)[0]
)
end
end
end
# Yields (match, src, replacement, offset) for each BBCode [img]...[/img].
def self.match_bbcode_img(markdown, external_src: false)
markdown.scan(%r{(\[img\]\s*([^\[\]\s]+)\s*\[/img\])}i) do |match|
if (external_src || (matched_uploads(match[1]).present?)) && block_given?
yield(match[0], match[1], +"![](#{PLACEHOLDER})", $~.offset(0)[0])
end
end
end
# Yields (match, src, replacement, offset) for each markdown reference
# definition ("[label]: url") whose URL is an upload.
def self.match_md_reference(markdown)
markdown.scan(/(\[([^\]]+)\]:([ ]+)(\S+))/) do |match|
if match[3] && matched_uploads(match[3]).present? && block_given?
yield(
match[0],
match[3],
+"[#{match[1]}]:#{match[2]}#{Discourse.base_url}#{PATH_PLACEHOLDER}",
$~.offset(0)[0]
)
end
end
end
# Yields (match, href, replacement, offset) for each <a> tag whose href is
# an upload; anchors with a class attribute keep the |attachment marker.
def self.match_anchor(markdown, external_href: false)
markdown.scan(%r{((<a[^<]+>)([^<\a>]*?)</a>)}i) do |match|
node = Nokogiri::HTML5.fragment(match[0]).children[0]
href = node.attributes["href"]&.value
if href && (external_href || matched_uploads(href).present?)
has_attachment = node.attributes["class"]&.value
index = $~.offset(0)[0]
text = match[2].strip.gsub("\n", "").gsub(/ +/, " ")
text = "#{text}|attachment" if has_attachment
yield(match[0], href, +"[#{text}](#{PLACEHOLDER})", index) if block_given?
end
end
end
# Yields (img_tag, src, replacement, offset) for each <img> tag whose src
# is an upload; `uploads` may map src -> Upload to inline short URLs.
def self.match_img(markdown, external_src: false, uploads: nil)
markdown.scan(%r{(<(?!img)[^<>]+/?>)?(\s*)(<img [^>\n]+>)}i) do |match|
node = Nokogiri::HTML5.fragment(match[2].strip).children[0]
src = node&.attributes&.[]("src")&.value
if src && (external_src || matched_uploads(src).present?)
upload = uploads&.[](src)
node["src"] = upload&.short_url || PLACEHOLDER
# Preserve the exact run of spaces between the preceding tag and the
# img so surrounding markdown layout is untouched.
spaces_before = match[1].present? ? match[1][/ +$/].size : 0
replacement = +"#{" " * spaces_before}#{node.to_s}"
yield(match[2], src, replacement, $~.offset(0)[0]) if block_given?
end
end
end
# Rewrites hotlinked (external) image URLs in `raw`, asking `blk` for the
# Upload corresponding to each URL; URLs with no upload are left alone.
def self.replace_hotlinked_image_urls(raw:, &blk)
replace =
Proc.new do |match, match_src, replacement, _index|
upload = blk.call(match_src)
next if !upload
replacement =
if replacement.include?(InlineUploads::PLACEHOLDER)
replacement.sub(InlineUploads::PLACEHOLDER, upload.short_url)
elsif replacement.include?(InlineUploads::PATH_PLACEHOLDER)
replacement.sub(InlineUploads::PATH_PLACEHOLDER, upload.short_path)
end
raw = raw.gsub(match, replacement)
end
# there are 6 ways to insert an image in a post
# HTML tag - <img src="http://...">
InlineUploads.match_img(raw, external_src: true, &replace)
# BBCode tag - [img]http://...[/img]
InlineUploads.match_bbcode_img(raw, external_src: true, &replace)
# Markdown linked image - [![alt](http://...)](http://...)
# Markdown inline - ![alt](http://...)
# Markdown inline - ![](http://... "image title")
# Markdown inline - ![alt](http://... "image title")
InlineUploads.match_md_inline_img(raw, external_src: true, &replace)
raw =
raw.gsub(%r{^(https?://\S+)(\s?)$}) do |match|
if upload = blk.call(match)
"![](#{upload.short_url})"
else
match
end
end
raw
end
# Returns the upload URLs found in `node` (stringified), matching short
# URLs, short paths, and full store/CDN/S3 URLs for this site.
def self.matched_uploads(node)
upload_path = Discourse.store.upload_path
base_url = Discourse.base_url.sub(%r{https?://}, "(https?://)")
regexps = [
%r{(upload://([a-zA-Z0-9]+)[a-zA-Z0-9\.]*)},
%r{(/uploads/short-url/([a-zA-Z0-9]+)[a-zA-Z0-9\.]*)},
%r{(#{base_url}/uploads/short-url/([a-zA-Z0-9]+)[a-zA-Z0-9\.]*)},
%r{(#{GlobalSetting.relative_url_root}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})},
%r{(#{base_url}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})},
]
if GlobalSetting.cdn_url && (cdn_url = GlobalSetting.cdn_url.sub(%r{https?://}, "(https?://)"))
regexps << %r{(#{cdn_url}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})}
if GlobalSetting.relative_url_root.present?
regexps << %r{(#{cdn_url}#{GlobalSetting.relative_url_root}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})}
end
end
if Discourse.store.external?
if Rails.configuration.multisite
regexps << %r{((https?:)?#{SiteSetting.Upload.s3_base_url}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})}
regexps << %r{(#{SiteSetting.Upload.s3_cdn_url}/#{upload_path}#{UPLOAD_REGEXP_PATTERN})}
else
regexps << /((https?:)?#{SiteSetting.Upload.s3_base_url}#{UPLOAD_REGEXP_PATTERN})/
regexps << /(#{SiteSetting.Upload.s3_cdn_url}#{UPLOAD_REGEXP_PATTERN})/
end
end
matches = []
node = node.to_s
regexps.each do |regexp|
node.scan(/(^|[\n\s"'\(>])#{regexp}($|[\n\s"'\)<])/) { |matched| matches << matched[1] }
end
matches
end
private_class_method :matched_uploads
# Recursively collects the tag names of all leaf nodes under `node`.
def self.node_children_names(node, names = Set.new)
if node.children.blank?
names << node.name
return names
end
node.children.each { |child| names = node_children_names(child, names) }
names
end
private_class_method :node_children_names
end
``` | # frozen_string_literal: true
RSpec.describe InlineUploads do
before { set_cdn_url "https://awesome.com" }
describe ".process" do
context "with local uploads" do
fab!(:upload)
fab!(:upload2) { Fabricate(:upload) }
fab!(:upload3) { Fabricate(:upload) }
it "should not correct existing inline uploads" do
md = <<~MD
![test](#{upload.short_url})haha
[test]#{upload.short_url}
MD
expect(InlineUploads.process(md)).to eq(md)
md = <<~MD
![test](#{upload.short_url})
[test|attachment](#{upload.short_url})
MD
expect(InlineUploads.process(md)).to eq(md)
end
it "should not escape existing content" do
md = "1 > 2"
expect(InlineUploads.process(md)).to eq(md)
end
it "should not escape invalid HTML tags" do
md = "<x>.<y>"
expect(InlineUploads.process(md)).to eq(md)
end
it "should work with invalid img tags" do
md = <<~MD
<img src="#{upload.url}">
This is an invalid `<img ...>` tag
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
<img src="#{upload.short_url}">
This is an invalid `<img ...>` tag
MD
md = '<img data-id="<>">'
expect(InlineUploads.process(md)).to eq(md)
end
it "should not correct code blocks" do
md = "`<a class=\"attachment\" href=\"#{upload2.url}\">In Code Block</a>`"
expect(InlineUploads.process(md)).to eq(md)
md = " <a class=\"attachment\" href=\"#{upload2.url}\">In Code Block</a>"
expect(InlineUploads.process(md)).to eq(md)
end
it "should not correct invalid links in quotes" do
post = Fabricate(:post)
user = Fabricate(:user)
md = <<~MD
[quote="#{user.username}, post:#{post.post_number}, topic:#{post.topic.id}"]
<img src="#{upload.url}"
someothertext#{upload2.url}someothertext
<img src="#{upload.url}"
sometext#{upload2.url}sometext
#{upload3.url}
#{Discourse.base_url}#{upload3.url}
[/quote]
<img src="#{upload2.url}">
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
[quote="#{user.username}, post:#{post.post_number}, topic:#{post.topic.id}"]
<img src="#{upload.url}"
someothertext#{upload2.url}someothertext
<img src="#{upload.url}"
sometext#{upload2.url}sometext
#{upload3.url}
![](#{upload3.short_url})
[/quote]
<img src="#{upload2.short_url}">
MD
end
it "should correct links in quotes" do
post = Fabricate(:post)
user = Fabricate(:user)
md = <<~MD
[quote="#{user.username}, post:#{post.post_number}, topic:#{post.topic.id}"]
some quote
#{Discourse.base_url}#{upload3.url}
![](#{upload.url})
[/quote]
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
[quote="#{user.username}, post:#{post.post_number}, topic:#{post.topic.id}"]
some quote
![](#{upload3.short_url})
![](#{upload.short_url})
[/quote]
MD
end
it "should correct markdown linked images" do
md = <<~MD
[![](#{upload.url})](https://somelink.com)
[![some test](#{upload2.url})](https://somelink.com)
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
[![](#{upload.short_url})](https://somelink.com)
[![some test](#{upload2.short_url})](https://somelink.com)
MD
end
it "should correct markdown images with title" do
md = <<~MD
![](#{upload.url} "some alt")
![testing](#{upload2.url} 'some alt' )
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
![](#{upload.short_url} "some alt")
![testing](#{upload2.short_url} 'some alt' )
MD
end
it "should correct bbcode img URLs to the short version" do
md = <<~MD
[img]http://some.external.img[/img]
[img]#{upload.url}[/img]
<img src="#{upload3.url}">
[img]
#{upload2.url}
[/img]
[img]#{upload.url}[/img][img]#{upload2.url}[/img]
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
[img]http://some.external.img[/img]
![](#{upload.short_url})
<img src="#{upload3.short_url}">
![](#{upload2.short_url})
![](#{upload.short_url})![](#{upload2.short_url})
MD
end
it "should correct markdown references" do
md = <<~MD
[link3][3]
[3]: #{Discourse.base_url}#{upload2.url}
This is a [link1][1] test [link2][2] something
<img src="#{upload.url}">
[1]: #{Discourse.base_url}#{upload.url}
[2]: #{Discourse.base_url.sub("http://", "https://")}#{upload2.url}
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
[link3][3]
[3]: #{Discourse.base_url}#{upload2.short_path}
This is a [link1][1] test [link2][2] something
<img src="#{upload.short_url}">
[1]: #{Discourse.base_url}#{upload.short_path}
[2]: #{Discourse.base_url}#{upload2.short_path}
MD
end
it "should correct html and markdown uppercase references" do
md = <<~MD
[IMG]#{upload.url}[/IMG]
<IMG src="#{upload2.url}" />
<A class="attachment" href="#{upload3.url}">Text</A>
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
![](#{upload.short_url})
<img src="#{upload2.short_url}">
[Text|attachment](#{upload3.short_url})
MD
end
it "should correct image URLs with v parameters" do
md = <<~MD
<img src="#{upload.url}?v=1">
<img src="#{Discourse.base_url}#{upload.url}?v=2">
<img src="#{GlobalSetting.cdn_url}#{upload.url}?v=3">
#{Discourse.base_url}#{upload.url}?v=45
#{GlobalSetting.cdn_url}#{upload.url}?v=999
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
<img src="#{upload.short_url}">
<img src="#{upload.short_url}">
<img src="#{upload.short_url}">
![](#{upload.short_url})
![](#{upload.short_url})
MD
end
context "with subfolder" do
before { set_subfolder "/community" }
it "should correct subfolder images" do
md = <<~MD
<img src="/community#{upload.url}">
#{Discourse.base_url}#{upload.url}
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
<img src="#{upload.short_url}">
![](#{upload.short_url})
MD
end
end
it "should correct raw image URLs to the short url and paths" do
md = <<~MD
#{Discourse.base_url}#{upload.url}
#{Discourse.base_url}#{upload.url} #{Discourse.base_url}#{upload2.url}
#{Discourse.base_url}#{upload3.url}
#{GlobalSetting.cdn_url}#{upload3.url}
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
![](#{upload.short_url})
#{Discourse.base_url}#{upload.short_path} #{Discourse.base_url}#{upload2.short_path}
![](#{upload3.short_url})
![](#{upload3.short_url})
MD
end
it "should correct non image URLs to the short url" do
SiteSetting.authorized_extensions = "mp4"
upload = Fabricate(:video_upload)
upload2 = Fabricate(:video_upload)
md = <<~MD
#{Discourse.base_url}#{upload.url}
#{Discourse.base_url}#{upload.url} #{Discourse.base_url}#{upload2.url}
#{GlobalSetting.cdn_url}#{upload2.url}
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
#{Discourse.base_url}#{upload.short_path}
#{Discourse.base_url}#{upload.short_path} #{Discourse.base_url}#{upload2.short_path}
#{Discourse.base_url}#{upload2.short_path}
MD
end
it "should correct img tags with uppercase upload extension" do
md = <<~MD
test<img src="#{upload.url.sub(".png", ".PNG")}">
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
test<img src="#{upload.short_url}">
MD
end
it "should correct image URLs that follows an image md" do
md = <<~MD
![image|690x290](#{upload.short_url})#{Discourse.base_url}#{upload2.url}
<#{Discourse.base_url}#{upload2.url}>
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
![image|690x290](#{upload.short_url})#{Discourse.base_url}#{upload2.short_path}
<#{Discourse.base_url}#{upload2.short_path}>
MD
end
it "should correct image URLs to the short version" do
md = <<~MD
![image|690x290](#{upload.short_url})
![IMAge|690x190,60%](#{upload.short_url})
![image](#{upload2.url})
![image|100x100](#{upload3.url})
<img src="#{Discourse.base_url}#{upload.url}" alt="some image" title="some title" />
<img src="#{Discourse.base_url}#{upload2.url}" alt="some image"><img src="#{Discourse.base_url}#{upload3.url}" alt="some image">
#{Discourse.base_url}#{upload3.url} #{Discourse.base_url}#{upload3.url}
<img src="#{upload.url}" width="5" height="4">
<img src="#{upload.url}" width="5px" height="auto">
`<img src="#{upload.url}" alt="image inside code quotes">`
```
<img src="#{upload.url}" alt="image inside code fences">
```
<img src="#{upload.url}" alt="image inside code block">
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
![image|690x290](#{upload.short_url})
![IMAge|690x190,60%](#{upload.short_url})
![image](#{upload2.short_url})
![image|100x100](#{upload3.short_url})
<img src="#{upload.short_url}" alt="some image" title="some title">
<img src="#{upload2.short_url}" alt="some image"><img src="#{upload3.short_url}" alt="some image">
#{Discourse.base_url}#{upload3.short_path} #{Discourse.base_url}#{upload3.short_path}
<img src="#{upload.short_url}" width="5" height="4">
<img src="#{upload.short_url}" width="5px" height="auto">
`<img src="#{upload.url}" alt="image inside code quotes">`
```
<img src="#{upload.url}" alt="image inside code fences">
```
<img src="#{upload.url}" alt="image inside code block">
MD
end
it "should not replace identical markdown in code blocks", skip: "Known issue" do
md = <<~MD
`![image|690x290](#{upload.url})`
![image|690x290](#{upload.url})
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
`![image|690x290](#{upload.url})`
![image|690x290](#{upload.short_url})
MD
end
it "should not be affected by an emoji" do
CustomEmoji.create!(name: "test", upload: upload3)
Emoji.clear_cache
md = <<~MD
:test:
![image|690x290](#{upload.url})
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
:test:
![image|690x290](#{upload.short_url})
MD
end
it "should correctly update images sources within anchor tags with indentation" do
md = <<~MD
<h1></h1>
<a href="http://somelink.com">
<img src="#{upload2.url}" alt="test" width="500" height="500">
</a>
<a href="http://somelink.com">
<img src="#{upload2.url}" alt="test" width="500" height="500">
</a>
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
<h1></h1>
<a href="http://somelink.com">
<img src="#{upload2.short_url}" alt="test" width="500" height="500">
</a>
<a href="http://somelink.com">
<img src="#{upload2.url}" alt="test" width="500" height="500">
</a>
MD
md =
"<h1></h1>\r\n<a href=\"http://somelink.com\">\r\n <img src=\"#{upload.url}\" alt=\"test\" width=\"500\" height=\"500\">\r\n</a>"
expect(InlineUploads.process(md)).to eq(
"<h1></h1>\r\n<a href=\"http://somelink.com\">\r\n <img src=\"#{upload.short_url}\" alt=\"test\" width=\"500\" height=\"500\">\r\n</a>",
)
end
it "should correctly update image sources within anchor or paragraph tags" do
md = <<~MD
<a href="http://somelink.com">
<img src="#{upload.url}" alt="test" width="500" height="500">
</a>
<p>
<img src="#{upload2.url}" alt="test">
</p>
<a href="http://somelink.com"><img src="#{upload3.url}" alt="test" width="500" height="500"></a>
<a href="http://somelink.com"> <img src="#{upload.url}" alt="test" width="500" height="500"> </a>
<a href="http://somelink.com">
<img src="#{upload.url}" alt="test" width="500" height="500">
</a>
<p>Test <img src="#{upload2.url}" alt="test" width="500" height="500"></p>
<hr/>
<img src="#{upload2.url}" alt="test" width="500" height="500">
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
<a href="http://somelink.com">
<img src="#{upload.short_url}" alt="test" width="500" height="500">
</a>
<p>
<img src="#{upload2.short_url}" alt="test">
</p>
<a href="http://somelink.com"><img src="#{upload3.short_url}" alt="test" width="500" height="500"></a>
<a href="http://somelink.com"> <img src="#{upload.short_url}" alt="test" width="500" height="500"> </a>
<a href="http://somelink.com">
<img src="#{upload.short_url}" alt="test" width="500" height="500">
</a>
<p>Test <img src="#{upload2.short_url}" alt="test" width="500" height="500"></p>
<hr/>
<img src="#{upload2.short_url}" alt="test" width="500" height="500">
MD
end
it "should not be affected by fake HTML tags" do
md = <<~MD
```
This is some <img src=" and <a href="
```
<img src="#{upload.url}" alt="test">
<img src="#{upload2.url}" alt="test" height="150<img">
> some quote
<a class="attachment" href="#{upload2.url}">test2</a>
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
```
This is some <img src=" and <a href="
```
<img src="#{upload.short_url}" alt="test">
<img src="#{upload2.short_url}" alt="test" height="150<img">
> some quote
[test2|attachment](#{upload2.short_url})
MD
end
it "should not be affected by an external or invalid links" do
md = <<~MD
<a id="test">invalid</a>
[test]("https://this.is.some.external.link")
<a href="https://some.external.com/link">test</a>
<a class="attachment" href="#{upload2.url}">test2</a>
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
<a id="test">invalid</a>
[test]("https://this.is.some.external.link")
<a href="https://some.external.com/link">test</a>
[test2|attachment](#{upload2.short_url})
MD
end
it "should correct attachment URLS to the short version when raw contains inline image" do
md = <<~MD
![image](#{upload.short_url}) ![image](#{upload.short_url})
[some complicated.doc %50](#{upload3.url})
<a class="attachment" href="#{upload2.url}">test2</a>
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
![image](#{upload.short_url}) ![image](#{upload.short_url})
[some complicated.doc %50](#{upload3.short_url})
[test2|attachment](#{upload2.short_url})
MD
end
it "should correct attachment URLs to the short version" do
md = <<~MD
<a class="attachment" href="#{upload.url}">
this
is
some
attachment
</a>
- <a class="attachment" href="#{upload.url}">test2</a>
- <a class="attachment" href="#{upload2.url}">test2</a>
- <a class="attachment" href="#{upload3.url}">test2</a>
<a class="test attachment" href="#{upload.url}">test3</a>
<a class="test attachment" href="#{upload2.url}">test3</a><a class="test attachment" href="#{upload3.url}">test3</a>
<a class="test attachment" href="#{upload3.url}">This is some _test_ here</a>
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
[this is some attachment|attachment](#{upload.short_url})
- [test2|attachment](#{upload.short_url})
- [test2|attachment](#{upload2.short_url})
- [test2|attachment](#{upload3.short_url})
[test3|attachment](#{upload.short_url})
[test3|attachment](#{upload2.short_url})[test3|attachment](#{upload3.short_url})
[This is some _test_ here|attachment](#{upload3.short_url})
MD
end
it "should correct full upload url to the shorter version" do
md = <<~MD
Some random text
![test](#{upload.short_url})
[test|attachment](#{upload.short_url})
<a class="test attachment" href="#{upload.url}">
test
</a>
`<a class="attachment" href="#{upload2.url}">In Code Block</a>`
<a class="attachment" href="#{upload3.url}">In Code Block</a>
<a href="#{upload.url}">newtest</a>
<a href="#{Discourse.base_url_no_prefix}#{upload.url}">newtest</a>
<a href="https://somerandomesite.com#{upload.url}">test</a>
<a class="attachment" href="https://somerandom.com/url">test</a>
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
Some random text
![test](#{upload.short_url})
[test|attachment](#{upload.short_url})
[test|attachment](#{upload.short_url})
`<a class="attachment" href="#{upload2.url}">In Code Block</a>`
<a class="attachment" href="#{upload3.url}">In Code Block</a>
[newtest](#{upload.short_url})
[newtest](#{upload.short_url})
<a href="https://somerandomesite.com#{upload.url}">test</a>
<a class="attachment" href="https://somerandom.com/url">test</a>
MD
end
it "accepts a block that yields when link does not match an upload in the db" do
url = "#{Discourse.base_url}#{upload.url}"
md = <<~MD
<img src="#{url}" alt="some image">
<img src="#{upload2.url}" alt="some image">
MD
upload.destroy!
InlineUploads.process(md, on_missing: lambda { |link| expect(link).to eq(url) })
end
end
context "with s3 uploads" do
let(:upload) { Fabricate(:upload_s3) }
let(:upload2) { Fabricate(:upload_s3) }
let(:upload3) { Fabricate(:upload) }
before do
upload3
setup_s3
SiteSetting.s3_cdn_url = "https://s3.cdn.com"
end
it "should correct image URLs to the short version" do
md = <<~MD
#{upload.url}
<img src="#{upload.url}" alt="some image">
test<img src="#{upload2.url}" alt="some image">test
<img src="#{URI.join(SiteSetting.s3_cdn_url, URI.parse(upload2.url).path).to_s}" alt="some image">
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
![](#{upload.short_url})
<img src="#{upload.short_url}" alt="some image">
test<img src="#{upload2.short_url}" alt="some image">test
<img src="#{upload2.short_url}" alt="some image">
MD
end
it "should correct markdown references" do
md = <<~MD
This is a [some reference] something
[some reference]: https:#{upload.url}
MD
expect(InlineUploads.process(md)).to eq(<<~MD)
This is a [some reference] something
[some reference]: #{Discourse.base_url}#{upload.short_path}
MD
end
end
end
describe ".match_md_inline_img" do
it "matches URLs with various characters" do
md = <<~MD
![test](https://some-site.com/a_test?q=1&b=hello%20there)
MD
url = nil
InlineUploads.match_md_inline_img(md, external_src: true) { |_match, src| url = src }
expect(url).to eq("https://some-site.com/a_test?q=1&b=hello%20there")
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Silences or unsilences a user (blocking them from posting) and performs the
# surrounding bookkeeping: hiding their recent posts, logging a staff action,
# firing DiscourseEvent hooks, and sending the user a system message.
class UserSilencer
  # UserHistory row created by #silence when an acting staff user was given;
  # nil otherwise.
  attr_reader :user_history

  # @param user    [User] the user being silenced/unsilenced
  # @param by_user [User, nil] the acting (staff) user, used for action logging
  # @param opts    [Hash] recognised keys: :keep_posts, :silenced_till,
  #                :message, :message_body, :reason, :post_id
  def initialize(user, by_user = nil, opts = {})
    @user, @by_user, @opts = user, by_user, opts
  end

  # Convenience wrapper — see #silence.
  def self.silence(user, by_user = nil, opts = {})
    UserSilencer.new(user, by_user, opts).silence
  end

  # Convenience wrapper — see #unsilence.
  def self.unsilence(user, by_user = nil, opts = {})
    UserSilencer.new(user, by_user, opts).unsilence
  end

  # True when a silence_user staff action was logged against this post.
  def self.was_silenced_for?(post)
    return false if post.blank?
    UserHistory.where(action: UserHistory.actions[:silence_user], post: post).exists?
  end

  # Silences @user until opts[:silenced_till] (default: effectively forever).
  # Returns false when the user is already silenced, true on success, and nil
  # when the save fails (implicit from the `if @user.save` branch).
  # NOTE: hide_posts runs BEFORE the already-silenced early return, so posts
  # are hidden even for a user who is already silenced.
  def silence
    hide_posts unless @opts[:keep_posts]
    return false if @user.silenced_till.present?
    @user.silenced_till = @opts[:silenced_till] || 1000.years.from_now
    if @user.save
      message_type = @opts[:message] || :silenced_by_staff
      details = StaffMessageFormat.new(:silence, @opts[:reason], @opts[:message_body]).format
      context = "#{message_type}: #{@opts[:reason]}"
      if @by_user
        log_params = { context: context, details: details }
        log_params[:post_id] = @opts[:post_id].to_i if @opts[:post_id]
        @user_history = StaffActionLogger.new(@by_user).log_silence_user(@user, log_params)
      end
      # Event listeners may mutate silence_message_params in place to
      # customise the system message (e.g. :message_title / :message_raw).
      silence_message_params = {}
      DiscourseEvent.trigger(
        :user_silenced,
        user: @user,
        silenced_by: @by_user,
        reason: @opts[:reason],
        message: @opts[:message_body],
        user_history: @user_history,
        post_id: @opts[:post_id],
        silenced_till: @user.silenced_till,
        silenced_at: DateTime.now,
        silence_message_params: silence_message_params,
      )
      # The silence PM itself must not trigger a notification email.
      silence_message_params.merge!(post_alert_options: { skip_send_email: true })
      SystemMessage.create(@user, message_type, silence_message_params)
      true
    end
  end

  # For TL0 users only: hides their posts from the last 24 hours and unlists
  # the topics they started in that window.
  def hide_posts
    return unless @user.trust_level == TrustLevel[0]
    # COALESCE keeps an existing hidden_reason_id instead of overwriting it.
    Post
      .where(user_id: @user.id)
      .where("created_at > ?", 24.hours.ago)
      .update_all(
        [
          "hidden = true, hidden_reason_id = COALESCE(hidden_reason_id, ?)",
          Post.hidden_reasons[:new_user_spam_threshold_reached],
        ],
      )
    # post_number 1 identifies topics the user started.
    topic_ids =
      Post
        .where(user_id: @user.id, post_number: 1)
        .where("created_at > ?", 24.hours.ago)
        .pluck(:topic_id)
    Topic.where(id: topic_ids).update_all(visible: false) unless topic_ids.empty?
  end

  # Clears the silence, notifies listeners, messages the user, and logs the
  # staff action when an acting user is present.
  def unsilence
    @user.silenced_till = nil
    if @user.save
      DiscourseEvent.trigger(:user_unsilenced, user: @user, by_user: @by_user)
      SystemMessage.create(@user, :unsilenced)
      StaffActionLogger.new(@by_user).log_unsilence_user(@user) if @by_user
    end
  end
end
``` | # frozen_string_literal: true
# Spec for UserSilencer: silencing (post hiding, logging, event hooks) and
# unsilencing (system message, logging).
RSpec.describe UserSilencer do
  # TL0 user, so silencing also hides their recent posts (see #hide_posts).
  fab!(:user) { Fabricate(:user, trust_level: 0) }
  fab!(:post) { Fabricate(:post, user: user) }
  fab!(:admin)

  describe "silence" do
    subject(:silence_user) { silencer.silence }

    let(:silencer) { UserSilencer.new(user) }

    it "silences the user correctly" do
      expect { UserSilencer.silence(user, admin) }.to change { user.reload.silenced? }
      # no need to silence as we are already silenced
      expect { UserSilencer.silence(user) }.not_to change { Post.count }
      # post should be hidden
      post.reload
      expect(post.topic.visible).to eq(false)
      expect(post.hidden).to eq(true)
      # history should be right
      count =
        UserHistory.where(
          action: UserHistory.actions[:silence_user],
          acting_user_id: admin.id,
          target_user_id: user.id,
        ).count
      expect(count).to eq(1)
    end

    it "skips sending the email for the silence PM via post alert" do
      # Run jobs inline so the (suppressed) email would be delivered here.
      NotificationEmailer.enable
      Jobs.run_immediately!
      UserSilencer.silence(user, admin)
      expect(ActionMailer::Base.deliveries.size).to eq(0)
    end

    it "does not hide posts for tl1" do
      user.update!(trust_level: 1)
      UserSilencer.silence(user, admin)
      post.reload
      expect(post.topic.visible).to eq(true)
      expect(post.hidden).to eq(false)
    end

    it "allows us to silence the user for a particular post" do
      expect(UserSilencer.was_silenced_for?(post)).to eq(false)
      UserSilencer.new(user, Discourse.system_user, post_id: post.id).silence
      expect(user).to be_silenced
      expect(UserSilencer.was_silenced_for?(post)).to eq(true)
    end

    it "only hides posts from the past 24 hours" do
      old_post = Fabricate(:post, user: user, created_at: 2.days.ago)
      UserSilencer.new(user, Discourse.system_user, post_id: post.id).silence
      expect(post.reload).to be_hidden
      expect(post.topic.reload).to_not be_visible
      old_post.reload
      expect(old_post).to_not be_hidden
      expect(old_post.topic).to be_visible
    end

    context "with a plugin hook" do
      # Listener mutates silence_message_params — the extension point
      # UserSilencer#silence exposes via the :user_silenced event.
      before do
        @override_silence_message = ->(opts) do
          opts[:silence_message_params][:message_title] = "override title"
          opts[:silence_message_params][:message_raw] = "override raw"
        end
        DiscourseEvent.on(:user_silenced, &@override_silence_message)
      end

      after { DiscourseEvent.off(:user_silenced, &@override_silence_message) }

      it "allows the message to be overridden" do
        UserSilencer.silence(user, admin)
        # force a reload in case instance has no posts
        system_user = User.find(Discourse::SYSTEM_USER_ID)
        post = system_user.posts.order("posts.id desc").first
        expect(post.topic.title).to eq("override title")
        expect(post.raw).to eq("override raw")
      end
    end
  end

  describe "unsilence" do
    it "unsilences the user correctly" do
      user.update!(silenced_till: 1.year.from_now)
      expect { UserSilencer.unsilence(user, admin) }.to change { user.reload.silenced? }
      # sends a message
      pm = user.topics_allowed.order("topics.id desc").first
      title = I18n.t("system_messages.unsilenced.subject_template")
      expect(pm.title).to eq(title)
      # logs it
      count =
        UserHistory.where(
          action: UserHistory.actions[:unsilence_user],
          acting_user_id: admin.id,
          target_user_id: user.id,
        ).count
      expect(count).to eq(1)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Bookmarkable registration for Topic: tells the bookmark system how to list,
# search, secure, and send reminders for bookmarks whose target is a Topic.
class TopicBookmarkable < BaseBookmarkable
  include TopicPostBookmarkableHelper

  # The ActiveRecord model this bookmarkable wraps.
  def self.model
    Topic
  end

  # Serializer used when rendering this bookmark in user bookmark lists.
  def self.serializer
    UserTopicBookmarkSerializer
  end

  # Associations preloaded on the bookmarked topics for list rendering.
  def self.preload_associations
    [:category, :tags, { first_post: :user }]
  end

  # Attaches the viewing user's TopicUser data to each preloaded topic.
  def self.perform_custom_preload!(topic_bookmarks, guardian)
    topics = topic_bookmarks.map(&:bookmarkable)
    topic_user_lookup = TopicUser.lookup_for(guardian.user, topics)
    topics.each { |topic| topic.user_data = topic_user_lookup[topic.id] }
  end

  # Topic bookmarks the user is allowed to see: listable topics secured by
  # the guardian, OR the user's own private messages, then filtered by
  # category permissions.
  def self.list_query(user, guardian)
    topics = Topic.listable_topics.secured(guardian)
    pms = Topic.private_messages_for_user(user)
    topic_bookmarks =
      user
        .bookmarks_of_type("Topic")
        .joins(
          "INNER JOIN topics ON topics.id = bookmarks.bookmarkable_id AND bookmarks.bookmarkable_type = 'Topic'",
        )
        .joins("LEFT JOIN topic_users ON topic_users.topic_id = topics.id")
        .where("topic_users.user_id = ?", user.id)
    guardian.filter_allowed_categories(topic_bookmarks.merge(topics.or(pms)))
  end

  # Extends the list query so the caller-provided search can match against
  # the first post's indexed search data (covers topic title + OP content).
  def self.search_query(bookmarks, query, ts_query, &bookmarkable_search)
    bookmarkable_search.call(
      bookmarks.joins(
        "LEFT JOIN posts ON posts.topic_id = topics.id AND posts.post_number = 1",
      ).joins("LEFT JOIN post_search_data ON post_search_data.post_id = posts.id"),
      "#{ts_query} @@ post_search_data.search_data",
    )
  end

  # Sends the bookmark-reminder notification, pointing at the topic's first
  # post.
  def self.reminder_handler(bookmark)
    send_reminder_notification(
      bookmark,
      topic_id: bookmark.bookmarkable_id,
      post_number: 1,
      data: {
        title: bookmark.bookmarkable.title,
        bookmarkable_url: bookmark.bookmarkable.first_post.url,
      },
    )
  end

  # A reminder is only sent if the topic still exists and is visible to the
  # bookmark owner.
  def self.reminder_conditions(bookmark)
    bookmark.bookmarkable.present? && self.can_see?(bookmark.user.guardian, bookmark)
  end

  def self.can_see?(guardian, bookmark)
    guardian.can_see_topic?(bookmark.bookmarkable)
  end

  # Whether the user has any bookmark at all inside this topic.
  def self.bookmark_metadata(bookmark, user)
    { topic_bookmarked: Bookmark.for_user_in_topic(user.id, bookmark.bookmarkable.id).exists? }
  end

  def self.validate_before_create(guardian, bookmarkable)
    raise Discourse::InvalidAccess if bookmarkable.blank? || !guardian.can_see_topic?(bookmarkable)
  end

  # Keeps TopicUser.bookmarked in sync after bookmark changes.
  def self.after_create(guardian, bookmark, opts)
    sync_topic_user_bookmarked(guardian.user, bookmark.bookmarkable, opts)
  end

  def self.after_destroy(guardian, bookmark, opts)
    sync_topic_user_bookmarked(guardian.user, bookmark.bookmarkable, opts)
  end

  # Deletes bookmarks whose topic was trashed more than 3 days ago, then
  # enqueues a re-sync of TopicUser.bookmarked for each affected topic.
  def self.cleanup_deleted
    related_topics = DB.query(<<~SQL, grace_time: 3.days.ago)
      DELETE FROM bookmarks b
      USING topics t
      WHERE b.bookmarkable_id = t.id AND b.bookmarkable_type = 'Topic'
      AND (t.deleted_at < :grace_time)
      RETURNING t.id AS topic_id
    SQL
    related_topics_ids = related_topics.map(&:topic_id).uniq
    related_topics_ids.each do |topic_id|
      Jobs.enqueue(:sync_topic_user_bookmarked, topic_id: topic_id)
    end
  end
end
``` | # frozen_string_literal: true
require "rails_helper"

# Spec for TopicBookmarkable via the RegisteredBookmarkable wrapper:
# listing, searching, reminder conditions/handler, and visibility.
RSpec.describe TopicBookmarkable do
  subject(:registered_bookmarkable) { RegisteredBookmarkable.new(TopicBookmarkable) }

  fab!(:user)
  fab!(:private_category) { Fabricate(:private_category, group: Fabricate(:group)) }

  let(:guardian) { Guardian.new(user) }
  let!(:topic1) { Fabricate(:topic) }
  let!(:topic2) { Fabricate(:topic) }
  let!(:post) { Fabricate(:post, topic: topic1) }
  let!(:bookmark1) do
    Fabricate(:bookmark, user: user, bookmarkable: topic1, name: "something i gotta do")
  end
  let!(:bookmark2) { Fabricate(:bookmark, user: user, bookmarkable: topic2) }
  # Belongs to another user — must never show up in `user`'s results.
  let!(:bookmark3) { Fabricate(:bookmark) }
  # list_query requires a topic_users row for the bookmark owner.
  let!(:topic_user1) { Fabricate(:topic_user, user: user, topic: topic1) }
  let!(:topic_user2) { Fabricate(:topic_user, user: user, topic: topic2) }

  describe "#perform_list_query" do
    it "returns all the user's bookmarks" do
      expect(registered_bookmarkable.perform_list_query(user, guardian).map(&:id)).to match_array(
        [bookmark1.id, bookmark2.id],
      )
    end

    it "does not return bookmarks for posts where the user does not have access to the topic category" do
      bookmark1.bookmarkable.update!(category: private_category)
      expect(registered_bookmarkable.perform_list_query(user, guardian).map(&:id)).to match_array(
        [bookmark2.id],
      )
    end

    it "does not return bookmarks for posts where the user does not have access to the private message" do
      bookmark1.update!(bookmarkable: Fabricate(:private_message_topic))
      expect(registered_bookmarkable.perform_list_query(user, guardian).map(&:id)).to match_array(
        [bookmark2.id],
      )
    end
  end

  describe "#perform_search_query" do
    # Indexing is needed so post_search_data rows exist for the topics.
    before { SearchIndexer.enable }

    it "returns bookmarks that match by name" do
      ts_query = Search.ts_query(term: "gotta", ts_config: "simple")
      expect(
        registered_bookmarkable.perform_search_query(
          registered_bookmarkable.perform_list_query(user, guardian),
          "%gotta%",
          ts_query,
        ).map(&:id),
      ).to match_array([bookmark1.id])
    end

    it "returns bookmarks that match by post search data (topic title or post content)" do
      post.update(raw: "some post content")
      topic1.update(title: "a great topic title")

      ts_query = Search.ts_query(term: "post content", ts_config: "simple")
      expect(
        registered_bookmarkable.perform_search_query(
          registered_bookmarkable.perform_list_query(user, guardian),
          "%post content%",
          ts_query,
        ).map(&:id),
      ).to match_array([bookmark1.id])

      ts_query = Search.ts_query(term: "great topic", ts_config: "simple")
      expect(
        registered_bookmarkable.perform_search_query(
          registered_bookmarkable.perform_list_query(user, guardian),
          "%great topic%",
          ts_query,
        ).map(&:id),
      ).to match_array([bookmark1.id])

      # Non-matching term returns nothing.
      ts_query = Search.ts_query(term: "blah", ts_config: "simple")
      expect(
        registered_bookmarkable.perform_search_query(
          registered_bookmarkable.perform_list_query(user, guardian),
          "%blah%",
          ts_query,
        ).map(&:id),
      ).to eq([])
    end
  end

  describe "#can_send_reminder?" do
    it "cannot send reminder if the topic is deleted" do
      expect(registered_bookmarkable.can_send_reminder?(bookmark1)).to eq(true)
      bookmark1.bookmarkable.trash!
      bookmark1.reload
      expect(registered_bookmarkable.can_send_reminder?(bookmark1)).to eq(false)
    end

    it "cannot send reminder if the user cannot access the topic" do
      expect(registered_bookmarkable.can_send_reminder?(bookmark1)).to eq(true)
      bookmark1.bookmarkable.update!(category: private_category)
      bookmark1.reload
      expect(registered_bookmarkable.can_send_reminder?(bookmark1)).to eq(false)
    end
  end

  describe "#reminder_handler" do
    it "creates a notification for the user with the correct details" do
      expect { registered_bookmarkable.send_reminder_notification(bookmark1) }.to change {
        Notification.count
      }.by(1)
      notif = user.notifications.last
      expect(notif.notification_type).to eq(Notification.types[:bookmark_reminder])
      expect(notif.topic_id).to eq(bookmark1.bookmarkable_id)
      # Topic reminders always point at the first post.
      expect(notif.post_number).to eq(1)
      expect(notif.data).to eq(
        {
          title: bookmark1.bookmarkable.title,
          bookmarkable_url: bookmark1.bookmarkable.first_post.url,
          display_username: bookmark1.user.username,
          bookmark_name: bookmark1.name,
          bookmark_id: bookmark1.id,
        }.to_json,
      )
    end
  end

  describe "#can_see?" do
    it "returns false if the post is in a private category or private message the user cannot see" do
      expect(registered_bookmarkable.can_see?(guardian, bookmark1)).to eq(true)
      bookmark1.bookmarkable.update!(category: private_category)
      expect(registered_bookmarkable.can_see?(guardian, bookmark1)).to eq(false)
      bookmark1.update!(bookmarkable: Fabricate(:private_message_topic))
      expect(registered_bookmarkable.can_see?(guardian, bookmark1)).to eq(false)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Bookmarkable registration for Post: tells the bookmark system how to list,
# search, secure, and send reminders for bookmarks whose target is a Post.
class PostBookmarkable < BaseBookmarkable
  include TopicPostBookmarkableHelper

  # The ActiveRecord model this bookmarkable wraps.
  def self.model
    Post
  end

  # Serializer used when rendering this bookmark in user bookmark lists.
  def self.serializer
    UserPostBookmarkSerializer
  end

  # Associations preloaded on the bookmarked posts for list rendering.
  def self.preload_associations
    [{ topic: %i[tags category] }, :user]
  end

  # Post bookmarks the user is allowed to see: the enclosing topic must be a
  # listable topic secured by the guardian OR one of the user's private
  # messages, the post itself must pass Post.secured, and finally the result
  # is filtered by category permissions.
  def self.list_query(user, guardian)
    topics = Topic.listable_topics.secured(guardian)
    pms = Topic.private_messages_for_user(user)
    post_bookmarks =
      user
        .bookmarks_of_type("Post")
        .joins(
          "INNER JOIN posts ON posts.id = bookmarks.bookmarkable_id AND bookmarks.bookmarkable_type = 'Post'",
        )
        .joins("LEFT JOIN topics ON topics.id = posts.topic_id")
        .joins("LEFT JOIN topic_users ON topic_users.topic_id = topics.id")
        .where("topic_users.user_id = ?", user.id)
    guardian.filter_allowed_categories(
      post_bookmarks.merge(topics.or(pms)).merge(Post.secured(guardian)),
    )
  end

  # Extends the list query so the caller-provided search can match against
  # the bookmarked post's indexed search data.
  def self.search_query(bookmarks, query, ts_query, &bookmarkable_search)
    bookmarkable_search.call(
      bookmarks.joins(
        "LEFT JOIN post_search_data ON post_search_data.post_id = bookmarks.bookmarkable_id AND bookmarks.bookmarkable_type = 'Post'",
      ),
      "#{ts_query} @@ post_search_data.search_data",
    )
  end

  # Sends the bookmark-reminder notification, pointing at the bookmarked
  # post's exact position in its topic.
  def self.reminder_handler(bookmark)
    send_reminder_notification(
      bookmark,
      topic_id: bookmark.bookmarkable.topic_id,
      post_number: bookmark.bookmarkable.post_number,
      data: {
        title: bookmark.bookmarkable.topic.title,
        bookmarkable_url: bookmark.bookmarkable.url,
      },
    )
  end

  # A reminder is only sent if both the post and its topic still exist and
  # the post is visible to the bookmark owner.
  def self.reminder_conditions(bookmark)
    bookmark.bookmarkable.present? && bookmark.bookmarkable.topic.present? &&
      self.can_see?(bookmark.user.guardian, bookmark)
  end

  def self.can_see?(guardian, bookmark)
    guardian.can_see_post?(bookmark.bookmarkable)
  end

  # Whether the user has any bookmark at all inside the post's topic.
  def self.bookmark_metadata(bookmark, user)
    {
      topic_bookmarked: Bookmark.for_user_in_topic(user.id, bookmark.bookmarkable.topic_id).exists?,
    }
  end

  def self.validate_before_create(guardian, bookmarkable)
    if bookmarkable.blank? || bookmarkable.topic.blank? ||
        !guardian.can_see_topic?(bookmarkable.topic) || !guardian.can_see_post?(bookmarkable)
      raise Discourse::InvalidAccess
    end
  end

  # Keeps TopicUser.bookmarked in sync after bookmark changes.
  def self.after_create(guardian, bookmark, opts)
    sync_topic_user_bookmarked(guardian.user, bookmark.bookmarkable.topic, opts)
  end

  def self.after_destroy(guardian, bookmark, opts)
    sync_topic_user_bookmarked(guardian.user, bookmark.bookmarkable.topic, opts)
  end

  # Deletes bookmarks whose post OR topic was trashed more than 3 days ago,
  # then enqueues a re-sync of TopicUser.bookmarked per affected topic.
  def self.cleanup_deleted
    related_topics = DB.query(<<~SQL, grace_time: 3.days.ago)
      DELETE FROM bookmarks b
      USING topics t, posts p
      WHERE t.id = p.topic_id AND b.bookmarkable_id = p.id AND b.bookmarkable_type = 'Post'
      AND (t.deleted_at < :grace_time OR p.deleted_at < :grace_time)
      RETURNING t.id AS topic_id
    SQL
    related_topics_ids = related_topics.map(&:topic_id).uniq
    related_topics_ids.each do |topic_id|
      Jobs.enqueue(:sync_topic_user_bookmarked, topic_id: topic_id)
    end
  end
end
``` | # frozen_string_literal: true
require "rails_helper"

# Spec for PostBookmarkable via the RegisteredBookmarkable wrapper:
# listing, searching, reminder conditions/handler, and visibility.
RSpec.describe PostBookmarkable do
  subject(:registered_bookmarkable) { RegisteredBookmarkable.new(PostBookmarkable) }

  fab!(:user)
  fab!(:private_category) { Fabricate(:private_category, group: Fabricate(:group)) }

  let(:guardian) { Guardian.new(user) }
  let!(:post1) { Fabricate(:post) }
  let!(:post2) { Fabricate(:post) }
  let!(:bookmark1) do
    Fabricate(:bookmark, user: user, bookmarkable: post1, name: "something i gotta do")
  end
  let!(:bookmark2) { Fabricate(:bookmark, user: user, bookmarkable: post2) }
  # Belongs to another user — must never show up in `user`'s results.
  let!(:bookmark3) { Fabricate(:bookmark) }
  # list_query requires a topic_users row for the bookmark owner.
  let!(:topic_user1) { Fabricate(:topic_user, user: user, topic: post1.topic) }
  let!(:topic_user2) { Fabricate(:topic_user, user: user, topic: post2.topic) }

  describe "#perform_list_query" do
    it "returns all the user's bookmarks" do
      expect(registered_bookmarkable.perform_list_query(user, guardian).map(&:id)).to match_array(
        [bookmark1.id, bookmark2.id],
      )
    end

    it "does not return bookmarks for posts where the user does not have access to the topic category" do
      bookmark1.bookmarkable.topic.update(category: private_category)
      expect(registered_bookmarkable.perform_list_query(user, guardian).map(&:id)).to match_array(
        [bookmark2.id],
      )
    end

    it "does not return bookmarks for posts where the user does not have access to the private message" do
      bookmark1.bookmarkable.update(topic: Fabricate(:private_message_topic))
      expect(registered_bookmarkable.perform_list_query(user, guardian).map(&:id)).to match_array(
        [bookmark2.id],
      )
    end
  end

  describe "#perform_search_query" do
    # Indexing is needed so post_search_data rows exist for the posts.
    before { SearchIndexer.enable }

    it "returns bookmarks that match by name" do
      ts_query = Search.ts_query(term: "gotta", ts_config: "simple")
      expect(
        registered_bookmarkable.perform_search_query(
          registered_bookmarkable.perform_list_query(user, guardian),
          "%gotta%",
          ts_query,
        ).map(&:id),
      ).to match_array([bookmark1.id])
    end

    it "returns bookmarks that match by post search data (topic title or post content)" do
      post2.update(raw: "some post content")
      post2.topic.update(title: "a great topic title")

      ts_query = Search.ts_query(term: "post content", ts_config: "simple")
      expect(
        registered_bookmarkable.perform_search_query(
          registered_bookmarkable.perform_list_query(user, guardian),
          "%post content%",
          ts_query,
        ).map(&:id),
      ).to match_array([bookmark2.id])

      ts_query = Search.ts_query(term: "great topic", ts_config: "simple")
      expect(
        registered_bookmarkable.perform_search_query(
          registered_bookmarkable.perform_list_query(user, guardian),
          "%great topic%",
          ts_query,
        ).map(&:id),
      ).to match_array([bookmark2.id])

      # Non-matching term returns nothing.
      ts_query = Search.ts_query(term: "blah", ts_config: "simple")
      expect(
        registered_bookmarkable.perform_search_query(
          registered_bookmarkable.perform_list_query(user, guardian),
          "%blah%",
          ts_query,
        ).map(&:id),
      ).to eq([])
    end
  end

  describe "#can_send_reminder?" do
    it "cannot send reminder if the post or topic is deleted" do
      expect(registered_bookmarkable.can_send_reminder?(bookmark1)).to eq(true)
      bookmark1.bookmarkable.trash!
      bookmark1.reload
      expect(registered_bookmarkable.can_send_reminder?(bookmark1)).to eq(false)
      # Recover the post, then trash the topic instead.
      Post.with_deleted.find_by(id: bookmark1.bookmarkable_id).recover!
      bookmark1.reload
      bookmark1.bookmarkable.topic.trash!
      bookmark1.reload
      expect(registered_bookmarkable.can_send_reminder?(bookmark1)).to eq(false)
    end

    it "cannot send reminder if the user cannot access the topic" do
      expect(registered_bookmarkable.can_send_reminder?(bookmark1)).to eq(true)
      bookmark1.bookmarkable.topic.update!(category: private_category)
      bookmark1.reload
      expect(registered_bookmarkable.can_send_reminder?(bookmark1)).to eq(false)
    end
  end

  describe "#reminder_handler" do
    it "creates a notification for the user with the correct details" do
      expect { registered_bookmarkable.send_reminder_notification(bookmark1) }.to change {
        Notification.count
      }.by(1)
      notif = user.notifications.last
      expect(notif.notification_type).to eq(Notification.types[:bookmark_reminder])
      expect(notif.topic_id).to eq(bookmark1.bookmarkable.topic_id)
      expect(notif.post_number).to eq(bookmark1.bookmarkable.post_number)
      expect(notif.data).to eq(
        {
          title: bookmark1.bookmarkable.topic.title,
          bookmarkable_url: bookmark1.bookmarkable.url,
          display_username: bookmark1.user.username,
          bookmark_name: bookmark1.name,
          bookmark_id: bookmark1.id,
        }.to_json,
      )
    end
  end

  describe "#can_see?" do
    it "returns false if the post is in a private category or private message the user cannot see" do
      expect(registered_bookmarkable.can_see?(guardian, bookmark1)).to eq(true)
      bookmark1.bookmarkable.topic.update(category: private_category)
      expect(registered_bookmarkable.can_see?(guardian, bookmark1)).to eq(false)
      bookmark1.bookmarkable.update(topic: Fabricate(:private_message_topic))
      expect(registered_bookmarkable.can_see?(guardian, bookmark1)).to eq(false)
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Matches URLs against an allow-list pattern that may contain a "*" wildcard
# (e.g. "https://*.discourse.org"), used to validate redirect/origin URLs.
module WildcardUrlChecker
  # Returns true when +url_to_check+ is a valid absolute URL (has a scheme
  # and a host) and matches the +url+ pattern. Only the FIRST "*" in the
  # pattern acts as a wildcard; it matches a run of non-whitespace characters.
  def self.check_url(url, url_to_check)
    return false if !valid_url?(url_to_check)

    # Escape regex metacharacters, then turn the (escaped) wildcard back into
    # a non-whitespace matcher. \A/\z anchor the whole string — unlike ^/$
    # they cannot be bypassed with an embedded newline.
    escaped_url = Regexp.escape(url).sub("\\*", '\S*')
    url_regex = Regexp.new("\\A#{escaped_url}\\z", Regexp::IGNORECASE)
    url_to_check.match?(url_regex)
  end

  # A URL is valid when it parses and carries both a scheme and a host.
  # URI.parse raises (never returns nil) on malformed input, so no safe
  # navigation is needed.
  def self.valid_url?(url)
    uri = URI.parse(url)
    uri.scheme.present? && uri.host.present?
  rescue URI::InvalidURIError
    false
  end

  # A bare `private` has no effect on singleton methods defined with
  # `def self.`; this is the correct way to hide a class-level helper.
  private_class_method :valid_url?
end
``` | # frozen_string_literal: true
# Spec for WildcardUrlChecker.check_url: wildcard matching, exact matching,
# and rejection of invalid/malicious URLs (suffix spoofing, newline bypass).
RSpec.describe WildcardUrlChecker do
  describe ".check_url" do
    context "when url is valid" do
      it "returns true" do
        # Wildcard spans multiple subdomain levels.
        result1 =
          described_class.check_url(
            "https://*.discourse.org",
            "https://anything.is.possible.discourse.org",
          )
        expect(result1).to eq(true)

        result2 =
          described_class.check_url("https://www.discourse.org", "https://www.discourse.org")
        expect(result2).to eq(true)

        # A bare "*" pattern matches any valid URL.
        result3 = described_class.check_url("*", "https://hello.discourse.org")
        expect(result3).to eq(true)

        # Custom (non-http) schemes are accepted.
        result4 =
          described_class.check_url("discourse://auth_redirect", "discourse://auth_redirect")
        expect(result4).to eq(true)

        result5 =
          described_class.check_url(
            "customprotocol://www.discourse.org",
            "customprotocol://www.discourse.org",
          )
        expect(result5).to eq(true)
      end
    end

    context "when url is invalid" do
      it "returns false" do
        # Suffix-spoofing: pattern must match the WHOLE string.
        result1 =
          described_class.check_url(
            "https://*.discourse.org",
            "https://bad-domain.discourse.org.evil.com",
          )
        expect(result1).to eq(false)

        result2 =
          described_class.check_url(
            "https://www.discourse.org",
            "https://www.discourse.org.evil.com",
          )
        expect(result2).to eq(false)

        result3 =
          described_class.check_url("https://www.discourse.org", "https://www.www.discourse.org")
        expect(result3).to eq(false)

        # Embedded newline must not defeat the anchors.
        result4 =
          described_class.check_url(
            "https://www.discourse.org",
            "https://www.discourse.org\nwww.discourse.org.evil.com",
          )
        expect(result4).to eq(false)

        # Missing host / invalid scheme / missing scheme are all rejected.
        result5 = described_class.check_url("https://", "https://")
        expect(result5).to eq(false)

        result6 =
          described_class.check_url(
            "invalid$protocol://www.discourse.org",
            "invalid$protocol://www.discourse.org",
          )
        expect(result6).to eq(false)

        result7 = described_class.check_url("noscheme", "noscheme")
        expect(result7).to eq(false)
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# frozen_string_literal: true
# Validates and persists custom email template/CSS overrides via SiteSetting.
class EmailStyleUpdater
  # Validation error messages collected by the last #update call.
  attr_reader :errors

  def initialize(user)
    @user = user
    @errors = []
  end

  # Updates the custom email HTML template and/or SCSS. Only the keys present
  # in +attrs+ (:html, :css) are touched. Validates everything first; nothing
  # is persisted unless ALL validations pass. Returns true on success, false
  # (with #errors populated) otherwise.
  def update(attrs)
    # The template must keep the placeholder the message body is injected into.
    if attrs.has_key?(:html) && !attrs[:html].include?("%{email_content}")
      @errors << I18n.t("email_style.html_missing_placeholder", placeholder: "%{email_content}")
    end
    if attrs.has_key?(:css)
      begin
        # Compile up front so syntax errors are caught before anything saves;
        # compiled_css is reused in the persistence branch below (both are
        # guarded by the same has_key?(:css) check).
        compiled_css = SassC::Engine.new(attrs[:css], style: :compressed).render
      rescue SassC::SyntaxError => e
        # @errors << I18n.t('email_style.css_syntax_error')
        # Keep only the first line of the SassC message (the actual error).
        @errors << e.message[0...(e.message.index("\n"))]
      end
    end
    return false unless @errors.empty?
    if attrs.has_key?(:html)
      if attrs[:html] == EmailStyle.default_template
        # Storing a value equal to the default would shadow future default
        # changes; drop the override instead.
        SiteSetting.remove_override!(:email_custom_template)
      else
        SiteSetting.email_custom_template = attrs[:html]
      end
    end
    if attrs.has_key?(:css)
      if attrs[:css] == EmailStyle.default_css
        SiteSetting.remove_override!(:email_custom_css)
        SiteSetting.remove_override!(:email_custom_css_compiled)
      else
        SiteSetting.email_custom_css = attrs[:css]
        SiteSetting.email_custom_css_compiled = compiled_css
      end
    end
    @errors.empty?
  end
end
``` | # frozen_string_literal: true
# Spec for EmailStyleUpdater#update: persistence, default handling, and
# validation failures (missing placeholder, invalid SCSS).
RSpec.describe EmailStyleUpdater do
  fab!(:admin)

  let(:default_html) { File.read("#{Rails.root}/app/views/email/default_template.html") }
  let(:updater) { EmailStyleUpdater.new(admin) }

  # Asserts that no email-style SiteSetting override is in effect.
  def expect_settings_to_be_unset
    expect(SiteSetting.email_custom_template).to_not be_present
    expect(SiteSetting.email_custom_css).to_not be_present
    expect(SiteSetting.email_custom_css_compiled).to_not be_present
  end

  describe "update" do
    it "can change the settings" do
      expect(updater.update(html: "For you: %{email_content}", css: "h1 { color: blue; }")).to eq(
        true,
      )
      expect(SiteSetting.email_custom_template).to eq("For you: %{email_content}")
      expect(SiteSetting.email_custom_css).to eq("h1 { color: blue; }")
      expect(SiteSetting.email_custom_css_compiled.strip).to eq("h1{color:blue}")
    end

    it "will not store defaults" do
      updater.update(html: default_html, css: "")
      expect_settings_to_be_unset
    end

    it "can clear settings if defaults given" do
      SiteSetting.email_custom_template = "For you: %{email_content}"
      SiteSetting.email_custom_css = "h1 { color: blue; }"
      SiteSetting.email_custom_css_compiled = "h1{color:blue}"
      updater.update(html: default_html, css: "")
      expect_settings_to_be_unset
    end

    it "fails if html is missing email_content" do
      expect(updater.update(html: "No email content", css: "")).to eq(false)
      expect(updater.errors).to include(
        I18n.t("email_style.html_missing_placeholder", placeholder: "%{email_content}"),
      )
      expect_settings_to_be_unset
    end

    it "fails if css is not valid scss" do
      # Unbalanced brace — SassC raises; only its first line is surfaced.
      expect(updater.update(html: "For you: %{email_content}", css: "h1 { color: blue;")).to eq(
        false,
      )
      expect(updater.errors).to_not be_empty
      expect(updater.errors.first).to include('Error: expected "}".')
      expect_settings_to_be_unset
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class SearchIndexer
MIN_POST_BLURB_INDEX_VERSION = 4
POST_INDEX_VERSION = 5
TOPIC_INDEX_VERSION = 4
CATEGORY_INDEX_VERSION = 3
USER_INDEX_VERSION = 3
TAG_INDEX_VERSION = 3
# version to apply when issuing a background reindex
REINDEX_VERSION = 0
TS_VECTOR_PARSE_REGEX = /('([^']*|'')*'\:)(([0-9]+[A-D]?,?)+)/
def self.disable
@disabled = true
end
def self.enable
@disabled = false
end
def self.update_index(table:, id:, a_weight: nil, b_weight: nil, c_weight: nil, d_weight: nil)
raw_data = { a: a_weight, b: b_weight, c: c_weight, d: d_weight }
# The version used in excerpts
search_data = raw_data.transform_values { |data| Search.prepare_data(data || "", :index) }
# The version used to build the index
indexed_data =
search_data.transform_values do |data|
data.gsub(/\S+/) { |word| word[0...SiteSetting.search_max_indexed_word_length] }
end
table_name = "#{table}_search_data"
foreign_key = "#{table}_id"
# for user login and name use "simple" lowercase stemmer
stemmer = table == "user" ? "simple" : Search.ts_config
ranked_index = <<~SQL
setweight(to_tsvector('#{stemmer}', #{Search.wrap_unaccent("coalesce(:a,''))")}, 'A') ||
setweight(to_tsvector('#{stemmer}', #{Search.wrap_unaccent("coalesce(:b,''))")}, 'B') ||
setweight(to_tsvector('#{stemmer}', #{Search.wrap_unaccent("coalesce(:c,''))")}, 'C') ||
setweight(to_tsvector('#{stemmer}', #{Search.wrap_unaccent("coalesce(:d,''))")}, 'D')
SQL
tsvector = DB.query_single("SELECT #{ranked_index}", indexed_data)[0]
additional_lexemes = []
# we also want to index parts of a domain name
# that way stemmed single word searches will match
additional_words = []
tsvector
.scan(/'(([a-zA-Z0-9]+\.)+[a-zA-Z0-9]+)'\:([\w+,]+)/)
.reduce(additional_lexemes) do |array, (lexeme, _, positions)|
count = 0
if lexeme !~ /\A(\d+\.)?(\d+\.)*(\*|\d+)\z/
loop do
count += 1
break if count >= 10 # Safeguard here to prevent infinite loop when a term has many dots
term, _, remaining = lexeme.partition(".")
break if remaining.blank?
additional_words << [term, positions]
array << "'#{remaining}':#{positions}"
lexeme = remaining
end
end
array
end
extra_domain_word_terms =
if additional_words.length > 0
DB
.query_single(
"SELECT to_tsvector(?, ?)",
stemmer,
additional_words.map { |term, _| term }.join(" "),
)
.first
.scan(TS_VECTOR_PARSE_REGEX)
.map do |term, _, indexes|
new_indexes =
indexes
.split(",")
.map do |index|
existing_positions = additional_words[index.to_i - 1]
if existing_positions
existing_positions[1]
else
index
end
end
.join(",")
"#{term}#{new_indexes}"
end
.join(" ")
end
tsvector = "#{tsvector} #{additional_lexemes.join(" ")} #{extra_domain_word_terms}"
if (max_dupes = SiteSetting.max_duplicate_search_index_terms) > 0
reduced = []
tsvector
.scan(TS_VECTOR_PARSE_REGEX)
.each do |term, _, indexes|
family_counts = Hash.new(0)
new_index_array = []
indexes
.split(",")
.each do |index|
family = nil
family = index[-1] if index[-1].match?(/[A-D]/)
# title dupes can completely dominate the index
# so we limit them to 1
if (family_counts[family] += 1) <= (family == "A" ? 1 : max_dupes)
new_index_array << index
end
end
reduced << "#{term.strip}#{new_index_array.join(",")}"
end
tsvector = reduced.join(" ")
end
indexed_data =
if table.to_s == "post"
clean_post_raw_data!(search_data[:d])
else
search_data.values.select { |d| d.length > 0 }.join(" ")
end
params = {
"raw_data" => indexed_data,
"#{foreign_key}" => id,
"locale" => SiteSetting.default_locale,
"version" => const_get("#{table.upcase}_INDEX_VERSION"),
"search_data" => tsvector,
}
yield params if block_given?
table_name.camelize.constantize.upsert(params)
rescue => e
if Rails.env.test?
raise
else
# TODO is there any way we can safely avoid this?
# best way is probably pushing search indexer into a dedicated process so it no longer happens on save
# instead in the post processor
Discourse.warn_exception(
e,
message: "Unexpected error while indexing #{table} for search",
env: {
id: id,
},
)
end
end
def self.update_topics_index(topic_id, title, cooked)
# a bit inconsistent that we use title as A and body as B when in
# the post index body is D
update_index(
table: "topic",
id: topic_id,
a_weight: title,
b_weight: HtmlScrubber.scrub(cooked)[0...Topic::MAX_SIMILAR_BODY_LENGTH],
)
end
def self.update_posts_index(
post_id:,
topic_title:,
category_name:,
topic_tags:,
cooked:,
private_message:
)
update_index(
table: "post",
id: post_id,
a_weight: topic_title,
b_weight: category_name,
c_weight: topic_tags,
# The tsvector resulted from parsing a string can be double the size of
# the original string. Since there is no way to estimate the length of
# the expected tsvector, we limit the input to ~50% of the maximum
# length of a tsvector (1_048_576 bytes).
d_weight: HtmlScrubber.scrub(cooked)[0..600_000],
) { |params| params["private_message"] = private_message }
end
def self.update_users_index(user_id, username, name, custom_fields)
update_index(
table: "user",
id: user_id,
a_weight: username,
b_weight: name,
c_weight: custom_fields,
)
end
def self.update_categories_index(category_id, name)
update_index(table: "category", id: category_id, a_weight: name)
end
def self.update_tags_index(tag_id, name)
update_index(table: "tag", id: tag_id, a_weight: name.downcase)
end
def self.queue_category_posts_reindex(category_id)
return if @disabled
DB.exec(<<~SQL, category_id: category_id, version: REINDEX_VERSION)
UPDATE post_search_data
SET version = :version
FROM posts
INNER JOIN topics ON posts.topic_id = topics.id
INNER JOIN categories ON topics.category_id = categories.id
WHERE post_search_data.post_id = posts.id
AND categories.id = :category_id
SQL
end
def self.queue_users_reindex(user_ids)
return if @disabled
DB.exec(<<~SQL, user_ids: user_ids, version: REINDEX_VERSION)
UPDATE user_search_data
SET version = :version
WHERE user_search_data.user_id IN (:user_ids)
SQL
end
def self.queue_post_reindex(topic_id)
return if @disabled
DB.exec(<<~SQL, topic_id: topic_id, version: REINDEX_VERSION)
UPDATE post_search_data
SET version = :version
FROM posts
WHERE post_search_data.post_id = posts.id
AND posts.topic_id = :topic_id
SQL
end
def self.index(obj, force: false)
return if @disabled
category_name = nil
tag_names = nil
topic = nil
if Topic === obj
topic = obj
elsif Post === obj
topic = obj.topic
end
category_name = topic.category&.name if topic
if topic
tags = topic.tags.select(:id, :name).to_a
if tags.present?
tag_names =
(tags.map(&:name) + Tag.where(target_tag_id: tags.map(&:id)).pluck(:name)).join(" ")
end
end
if Post === obj && obj.raw.present? &&
(force || obj.saved_change_to_cooked? || obj.saved_change_to_topic_id?)
if topic
SearchIndexer.update_posts_index(
post_id: obj.id,
topic_title: topic.title,
category_name: category_name,
topic_tags: tag_names,
cooked: obj.cooked,
private_message: topic.private_message?,
)
SearchIndexer.update_topics_index(topic.id, topic.title, obj.cooked) if obj.is_first_post?
end
end
if User === obj && (obj.saved_change_to_username? || obj.saved_change_to_name? || force)
SearchIndexer.update_users_index(
obj.id,
obj.username_lower || "",
obj.name ? obj.name.downcase : "",
obj.user_custom_fields.searchable.map(&:value).join(" "),
)
end
if Topic === obj && (obj.saved_change_to_title? || force)
if obj.posts
if post = obj.posts.find_by(post_number: 1)
SearchIndexer.update_posts_index(
post_id: post.id,
topic_title: obj.title,
category_name: category_name,
topic_tags: tag_names,
cooked: post.cooked,
private_message: obj.private_message?,
)
SearchIndexer.update_topics_index(obj.id, obj.title, post.cooked)
end
end
end
if Category === obj && (obj.saved_change_to_name? || force)
SearchIndexer.queue_category_posts_reindex(obj.id)
SearchIndexer.update_categories_index(obj.id, obj.name)
end
if Tag === obj && (obj.saved_change_to_name? || force)
SearchIndexer.update_tags_index(obj.id, obj.name)
end
end
def self.clean_post_raw_data!(raw_data)
urls = Set.new
raw_data.scan(Discourse::Utils::URI_REGEXP) { urls << $& }
urls.each do |url|
begin
case File.extname(URI(url).path || "")
when Oneboxer::VIDEO_REGEX
raw_data.gsub!(url, I18n.t("search.video"))
when Oneboxer::AUDIO_REGEX
raw_data.gsub!(url, I18n.t("search.audio"))
end
rescue URI::InvalidURIError
end
end
raw_data
end
private_class_method :clean_post_raw_data!
class HtmlScrubber < Nokogiri::XML::SAX::Document
attr_reader :scrubbed
def initialize
@scrubbed = +""
end
def self.scrub(html)
return +"" if html.blank?
begin
document = Nokogiri.HTML5("<div>#{html}</div>", nil, Encoding::UTF_8.to_s)
rescue ArgumentError
return +""
end
nodes = document.css("div.#{CookedPostProcessor::LIGHTBOX_WRAPPER_CSS_CLASS}")
if nodes.present?
nodes.each do |node|
node.traverse do |child_node|
next if child_node == node
if %w[a img].exclude?(child_node.name)
child_node.remove
elsif child_node.name == "a"
ATTRIBUTES.each { |attribute| child_node.remove_attribute(attribute) }
end
end
end
end
document.css("img.emoji").each { |node| node.remove_attribute("alt") }
document
.css("a[href]")
.each do |node|
if node["href"] == node.text || MENTION_CLASSES.include?(node["class"])
node.remove_attribute("href")
end
if node["class"] == "anchor" && node["href"].starts_with?("#")
node.remove_attribute("href")
end
end
html_scrubber = new
Nokogiri::HTML::SAX::Parser.new(html_scrubber).parse(document.to_html)
html_scrubber.scrubbed.squish
end
MENTION_CLASSES ||= %w[mention mention-group]
ATTRIBUTES ||= %w[alt title href data-video-title]
def start_element(_name, attributes = [])
attributes = Hash[*attributes.flatten]
ATTRIBUTES.each do |attribute_name|
if attributes[attribute_name].present? &&
!(attribute_name == "href" && UrlHelper.is_local(attributes[attribute_name]))
characters(attributes[attribute_name])
end
end
end
def characters(str)
scrubbed << " #{str} "
end
end
end
``` | # frozen_string_literal: true
RSpec.describe SearchIndexer do
let(:post_id) { 99 }
before { SearchIndexer.enable }
after { SearchIndexer.disable }
it "correctly indexes chinese" do
SiteSetting.default_locale = "zh_CN"
data = "你好世界"
SearchIndexer.update_posts_index(
post_id: post_id,
topic_title: "",
category_name: "",
topic_tags: "",
cooked: data,
private_message: false,
)
post_search_data = PostSearchData.find_by(post_id: post_id)
expect(post_search_data.raw_data).to eq("你好 世界")
expect(post_search_data.search_data).to eq("'世界':2 '你好':1")
end
it "extract youtube title" do
html =
"<div class=\"lazy-video-container\" data-video-id=\"lmFgeFh2nlw\" data-video-title=\"Metallica Mixer Explains Missing Bass on 'And Justice for All' [Exclusive]\" data-provider-name=\"youtube\"></div>"
scrubbed = SearchIndexer::HtmlScrubber.scrub(html)
expect(scrubbed).to eq(
"Metallica Mixer Explains Missing Bass on 'And Justice for All' [Exclusive]",
)
end
it "extract a link" do
html = "<a href='http://meta.discourse.org/'>link</a>"
scrubbed = SearchIndexer::HtmlScrubber.scrub(html)
expect(scrubbed).to eq("http://meta.discourse.org/ link")
end
it "ignores autogenerated link anchors" do
html = "<a class='anchor' href='#something-special'>something special</a>"
scrubbed = SearchIndexer::HtmlScrubber.scrub(html)
expect(scrubbed).to eq("something special")
end
it "extracts @username from mentions" do
html =
'<p><a class="mention" href="/u/%E7%8B%AE%E5%AD%90">@狮子</a> <a class="mention" href="/u/foo">@foo</a></p>'
scrubbed = SearchIndexer::HtmlScrubber.scrub(html)
expect(scrubbed).to eq("@狮子 @foo")
end
it "extracts @groupname from group mentions" do
html =
'<p><a class="mention-group" href="/groups/%D0%B0%D0%B2%D1%82%D0%BE%D0%BC%D0%BE%D0%B1%D0%B8%D0%BB%D0%B8%D1%81%D1%82">@автомобилист</a></p>'
scrubbed = SearchIndexer::HtmlScrubber.scrub(html)
expect(scrubbed).to eq("@автомобилист")
end
it "extracts emoji name from emoji image" do
emoji = Emoji["wink"]
html =
%Q|<img src=\"#{URI.join(Discourse.base_url_no_prefix, emoji.url)}\" title=\":wink:\" class=\"emoji only-emoji\" alt=\":wink:\" loading=\"lazy\" width=\"20\" height=\"20\">|
scrubbed = SearchIndexer::HtmlScrubber.scrub(html)
expect(scrubbed).to eq(":wink:")
end
it "doesn't index local files" do
html = <<~HTML
<p><img src="https://www.discourse.org/logo.png" alt="Discourse"></p>
<p><img src="#{Discourse.base_url_no_prefix}/uploads/episodeinteractive/original/3X/0/f/0f40b818356bdc1d80acfa905034e95cfd112a3a.png" alt="51%20PM" width="289" height="398"></p>
<div class="lightbox-wrapper">
<a class="lightbox" href="#{Discourse.base_url_no_prefix}/uploads/episodeinteractive/original/3X/1/6/16790095df3baf318fb2eb1d7e5d7860dc45d48b.jpg" data-download-href="#{Discourse.base_url_no_prefix}/uploads/episodeinteractive/16790095df3baf318fb2eb1d7e5d7860dc45d48b" title="Untitled design (21).jpg" rel="nofollow noopener">
<img src="#{Discourse.base_url_no_prefix}/uploads/episodeinteractive/optimized/3X/1/6/16790095df3baf318fb2eb1d7e5d7860dc45d48b_1_563x500.jpg" alt="Untitled%20design%20(21)" width="563" height="500">
<div class="meta">
<svg class="fa d-icon d-icon-far-image svg-icon" aria-hidden="true"><use href="#far-image"></use></svg>
<span class="filename">Untitled design (21).jpg</span>
<span class="informations">1280x1136 472 KB</span>
<svg class="fa d-icon d-icon-discourse-expand svg-icon" aria-hidden="true"><use href="#discourse-expand"></use></svg>
</div>
</a>
</div>
HTML
scrubbed = SearchIndexer::HtmlScrubber.scrub(html)
expect(scrubbed).to eq("Discourse 51%20PM Untitled%20design%20(21)")
end
it "correctly indexes a post according to version" do
# Preparing so that they can be indexed to right version
SearchIndexer.update_posts_index(
post_id: post_id,
topic_title: "dummy",
category_name: "",
topic_tags: nil,
cooked: nil,
private_message: false,
)
PostSearchData.find_by(post_id: post_id).update!(version: -1)
data = "<a>This</a> is a test"
SearchIndexer.update_posts_index(
post_id: post_id,
topic_title: "",
category_name: "",
topic_tags: nil,
cooked: data,
private_message: false,
)
raw_data, locale, version =
PostSearchData.where(post_id: post_id).pluck(:raw_data, :locale, :version)[0]
expect(raw_data).to eq("This is a test")
expect(locale).to eq(SiteSetting.default_locale)
expect(version).to eq(SearchIndexer::POST_INDEX_VERSION)
end
describe ".index" do
let(:topic) { Fabricate(:topic, title: "this is a title that I am testing") }
let(:post) { Fabricate(:post, topic: topic) }
it "should index posts correctly" do
expect { post }.to change { PostSearchData.count }.by(1)
expect { post.update!(raw: "this is new content") }.to change {
post.reload.post_search_data.search_data
}
expect { post.update!(topic_id: Fabricate(:topic).id) }.to change {
post.reload.post_search_data.search_data
}
end
it "should work with edge case domain names" do
# 00E5A4 stems to 00e5 and a4, which is odd, but by-design
# this may cause internal indexing to fail due to indexes not aligning
# when stuffing terms for domains
post.update!(cooked: <<~HTML)
Test.00E5A4.1
HTML
SearchIndexer.update_posts_index(
post_id: post.id,
topic_title: post.topic.title,
category_name: post.topic.category&.name,
topic_tags: post.topic.tags.map(&:name).join(" "),
cooked: post.cooked,
private_message: post.topic.private_message?,
)
end
it "should work with invalid HTML" do
post.update!(cooked: "<FD>" * Nokogiri::Gumbo::DEFAULT_MAX_TREE_DEPTH)
SearchIndexer.update_posts_index(
post_id: post.id,
topic_title: post.topic.title,
category_name: post.topic.category&.name,
topic_tags: post.topic.tags.map(&:name).join(" "),
cooked: post.cooked,
private_message: post.topic.private_message?,
)
end
it "should not index posts with empty raw" do
expect do
post = Fabricate.build(:post, raw: "", post_type: Post.types[:small_action])
post.save!(validate: false)
end.to_not change { PostSearchData.count }
end
it "should not tokenize urls and duplicate title and href in <a>" do
post.update!(raw: <<~RAW)
https://meta.discourse.org/some.png
RAW
post.rebake!
post.reload
expect(post.post_search_data.raw_data).to eq("https://meta.discourse.org/some.png")
expect(post.post_search_data.search_data).to eq_ts_vector(
"'/some.png':12 'discourse.org':11 'meta.discourse.org':11 'meta.discourse.org/some.png':10 'org':11 'test':8A 'titl':4A 'uncategor':9B 'meta':11 'discours':11",
)
end
it "should not tokenize versions" do
post.update!(raw: "123.223")
expect(post.post_search_data.search_data).to eq(
"'123.223':10 'test':8A 'titl':4A 'uncategor':9B",
)
post.update!(raw: "15.2.231.423")
post.reload
expect(post.post_search_data.search_data).to eq(
"'15.2.231.423':10 'test':8A 'titl':4A 'uncategor':9B",
)
end
it "should tokenize host of a URL and removes query string" do
category = Fabricate(:category, name: "awesome category")
topic = Fabricate(:topic, category: category, title: "this is a test topic")
post = Fabricate(:post, topic: topic, raw: <<~RAW)
a https://car.com?bob=1, http://efg.com.au?bill=1 b hij.net/xyz=1
www.klm.net/?IGNORE=1 <a href="http://abc.de.nop.co.uk?IGNORE=1&ignore2=2">test</a> https://cars.com
RAW
post.rebake!
post.reload
topic = post.topic
# Note, a random non URL string should be tokenized properly,
# hence www.klm.net?IGNORE=1 it was inserted in autolinking.
# We could consider amending the auto linker to add
# more context to say "hey, this part of <a href>...</a> was a guess by autolinker.
# A blanket treating of non-urls without this logic is risky.
expect(post.post_search_data.raw_data).to eq(
"a https://car.com , http://efg.com.au b http://hij.net/xyz=1 hij.net/xyz=1 http://www.klm.net/ www.klm.net/?IGNORE=1 http://abc.de.nop.co.uk test https://cars.com",
)
expect(post.post_search_data.search_data).to eq_ts_vector(
"'/?ignore=1':21 '/xyz=1':14,17 'car.com':9 'cars.com':24 'abc.de.nop.co.uk':22 'au':10 'awesom':6B 'b':11 'categori':7B 'co.uk':22 'com':9,10,24 'com.au':10 'de.nop.co.uk':22 'efg.com.au':10 'hij.net':13,16 'hij.net/xyz=1':12,15 'klm.net':18,20 'net':13,16,18,20 'nop.co.uk':22 'test':4A,23 'topic':5A 'uk':22 'www.klm.net':18,20 'www.klm.net/?ignore=1':19 'car':9,24 'co':22 'de':22 'efg':10 'hij':13,16 'klm':18,20 'nop':22 'www':18,20 'abc':22",
)
end
it "should not include lightbox in search" do
Jobs.run_immediately!
SiteSetting.max_image_width = 1
stub_request(:get, "https://1.2.3.4/some.png").to_return(
status: 200,
body: file_from_fixtures("logo.png").read,
)
src = "https://meta.discourse.org/some.png"
post = Fabricate(:post, raw: <<~RAW)
Let me see how I can fix this image
<img src="#{src}" title="GOT" alt="white walkers" width="2" height="2">
RAW
post.rebake!
post.reload
expect(post.cooked).not_to include(CookedPostProcessor::LIGHTBOX_WRAPPER_CSS_CLASS)
expect(post.post_search_data.raw_data).to eq(
"Let me see how I can fix this image white walkers GOT",
)
end
it "should strips audio and videos URLs from raw data" do
SiteSetting.authorized_extensions = "mp4"
Fabricate(:video_upload)
post.update!(raw: <<~RAW)
link to an external page: https://google.com/?u=bar
link to an audio file: https://somesite.com/audio.m4a
link to a video file: https://somesite.com/content/somethingelse.MOV
link to an invalid URL: http:error]
RAW
expect(post.post_search_data.raw_data).to eq(
"link to an external page: https://google.com/ link to an audio file: #{I18n.t("search.audio")} link to a video file: #{I18n.t("search.video")} link to an invalid URL: http:error]",
)
expect(post.post_search_data.search_data).to eq_ts_vector(
"'/audio.m4a':23 '/content/somethingelse.mov':31 'audio':19 'com':15,22,30 'error':38 'extern':13 'file':20,28 'google.com':15 'http':37 'invalid':35 'link':10,16,24,32 'page':14 'somesite.com':22,30 'somesite.com/audio.m4a':21 'somesite.com/content/somethingelse.mov':29 'test':8A 'titl':4A 'uncategor':9B 'url':36 'video':27 'googl':15 'somesit':22,30",
)
end
it "should unaccent indexed content" do
SiteSetting.search_ignore_accents = true
post.update!(raw: "Cette oeuvre d'art n'est pas une œuvre")
post.post_search_data.reload
expect(post.post_search_data.search_data).not_to include("œuvr")
expect(post.post_search_data.search_data).to include("oeuvr")
SiteSetting.search_ignore_accents = false
SearchIndexer.index(post, force: true)
post.post_search_data.reload
expect(post.post_search_data.search_data).to include("œuvr")
expect(post.post_search_data.search_data).to include("oeuvr")
end
it "truncates long words in the index" do
SiteSetting.search_max_indexed_word_length = 4
title = "A title that is long enough"
contents = "I am the best beige object http://example.com/long/url"
topic.update!(title: title)
post.update!(raw: contents)
post_search_data = post.post_search_data
post_search_data.reload
expect(post_search_data.raw_data).to eq(contents)
words = post_search_data.search_data.scan(/'([^']*)'/).map { |match| match[0] }
expect(words).to contain_exactly(
"best",
"beig",
"obj",
"http",
"titl",
"long",
"enou",
"unca",
)
end
it "limits number of repeated terms when max_duplicate_search_index_terms site setting has been configured" do
SiteSetting.max_duplicate_search_index_terms = 5
contents = <<~TEXT
#{"sam " * 10}
<a href="https://something.com/path:path'path?term='hello'">url</a>
<a href="https://somethings.com/path:path'path?term='hello'">url</a>
TEXT
post.update!(raw: contents)
post_search_data = post.post_search_data
post_search_data.reload
terms =
"'/path:path''path':22,26 'com':21,25 'sam':10,11,12,13,14 'something.com':21 'something.com/path:path''path':20 'test':8A 'titl':4A 'uncategor':9B 'url':23,27 'someth':21,25 'somethings.com':25 'somethings.com/path:path''path':24"
expect(post_search_data.search_data).to eq_ts_vector(terms)
end
end
describe ".queue_post_reindex" do
let(:post) { Fabricate(:post) }
let(:topic) { post.topic }
it "should reset the version of search data for all posts in the topic" do
post2 = Fabricate(:post)
SearchIndexer.queue_post_reindex(topic.id)
expect(post.reload.post_search_data.version).to eq(SearchIndexer::REINDEX_VERSION)
expect(post2.reload.post_search_data.version).to eq(SearchIndexer::POST_INDEX_VERSION)
end
end
describe ".queue_users_reindex" do
let!(:user) { Fabricate(:user) }
let!(:user2) { Fabricate(:user) }
it "should reset the version of search data for all users" do
SearchIndexer.index(user, force: true)
SearchIndexer.index(user2, force: true)
SearchIndexer.queue_users_reindex([user.id])
expect(user.reload.user_search_data.version).to eq(SearchIndexer::REINDEX_VERSION)
expect(user2.reload.user_search_data.version).to eq(SearchIndexer::USER_INDEX_VERSION)
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
class UserNotificationScheduleProcessor
attr_accessor :schedule, :user, :timezone_name
def initialize(schedule)
@schedule = schedule
@user = schedule.user
@timezone_name = user.user_option.timezone
end
def create_do_not_disturb_timings
local_time = Time.now.in_time_zone(timezone_name)
create_timings_for(local_time, days: 2)
end
def self.create_do_not_disturb_timings_for(schedule)
processor = UserNotificationScheduleProcessor.new(schedule)
processor.create_do_not_disturb_timings
end
private
def create_timings_for(local_time, days: 0, previous_timing: nil)
weekday = transform_wday(local_time.wday)
start_minute = schedule["day_#{weekday}_start_time"]
end_minute = schedule["day_#{weekday}_end_time"]
previous_timing = find_previous_timing(local_time) if previous_timing.nil? && start_minute != 0
if start_minute > 0
previous_timing.ends_at = utc_time_at_minute(local_time, start_minute - 1)
if previous_timing.id
previous_timing.save
else
user.do_not_disturb_timings.find_or_create_by(previous_timing.attributes.except("id"))
end
next_timing =
user.do_not_disturb_timings.new(
starts_at: utc_time_at_minute(local_time, end_minute),
scheduled: true,
)
save_timing_and_continue(local_time, next_timing, days)
else
save_timing_and_continue(local_time, previous_timing, days)
end
end
private
def find_previous_timing(local_time)
# Try and find a previously scheduled dnd timing that we can extend if the
# ends_at is at the previous midnight. fallback to a new timing if not.
previous =
user.do_not_disturb_timings.find_by(
ends_at: (local_time - 1.day).end_of_day.utc,
scheduled: true,
)
previous ||
user.do_not_disturb_timings.new(starts_at: local_time.beginning_of_day.utc, scheduled: true)
end
def save_timing_and_continue(local_time, timing, days)
if days == 0
if timing
timing.ends_at = local_time.end_of_day.utc
user.do_not_disturb_timings.find_or_create_by(timing.attributes.except("id"))
end
user.publish_do_not_disturb(ends_at: user.do_not_disturb_until)
else
create_timings_for(local_time + 1.day, days: days - 1, previous_timing: timing)
end
end
def utc_time_at_minute(base_time, total_minutes)
hour = total_minutes / 60
minute = total_minutes % 60
Time.new(
base_time.year,
base_time.month,
base_time.day,
hour,
minute,
0,
base_time.formatted_offset,
).utc
end
def transform_wday(wday)
wday == 0 ? 6 : wday - 1
end
end
``` | # frozen_string_literal: true
RSpec.describe UserNotificationScheduleProcessor do
include ActiveSupport::Testing::TimeHelpers
fab!(:user)
let(:standard_schedule) do
schedule =
UserNotificationSchedule.create({ user: user }.merge(UserNotificationSchedule::DEFAULT))
schedule.enabled = true
schedule.save
schedule
end
describe "#create_do_not_disturb_timings" do
[
{ timezone: "UTC", offset: "+00:00" },
{ timezone: "America/Chicago", offset: "-06:00" },
{ timezone: "Australia/Sydney", offset: "+11:00" },
].each do |timezone_info|
it "creates dnd timings correctly for each timezone" do
user.user_option.update(timezone: timezone_info[:timezone])
travel_to Time.new(2020, 1, 4, 12, 0, 0, "+00:00") do
UserNotificationScheduleProcessor.create_do_not_disturb_timings_for(standard_schedule)
# The default schedule is 8am - 5pm.
# Expect DND timings to fill gaps before/after those times for 3 days.
dnd_timings = user.do_not_disturb_timings
offset = timezone_info[:offset]
expect(dnd_timings[0].starts_at).to eq_time(Time.new(2020, 1, 4, 0, 0, 0, offset))
expect(dnd_timings[0].ends_at).to eq_time(Time.new(2020, 1, 4, 7, 59, 0, offset))
expect(dnd_timings[1].starts_at).to eq_time(Time.new(2020, 1, 4, 17, 0, 0, offset))
expect(dnd_timings[1].ends_at).to eq_time(Time.new(2020, 1, 5, 7, 59, 0, offset))
expect(dnd_timings[2].starts_at).to eq_time(Time.new(2020, 1, 5, 17, 0, 0, offset))
expect(dnd_timings[2].ends_at).to eq_time(Time.new(2020, 1, 6, 7, 59, 0, offset))
expect(dnd_timings[3].starts_at).to eq_time(Time.new(2020, 1, 6, 17, 0, 0, offset))
expect(dnd_timings[3].ends_at).to be_within(1.second).of Time.new(
2020,
1,
6,
23,
59,
59,
offset,
)
end
end
end
it "does not create duplicate record, but ensures the correct records exist" do
user.user_option.update(timezone: "UTC")
travel_to Time.new(2020, 1, 4, 12, 0, 0, "+00:00") do
UserNotificationScheduleProcessor.create_do_not_disturb_timings_for(standard_schedule)
expect(user.do_not_disturb_timings.count).to eq(4)
# All duplicates, so no new timings should be created
UserNotificationScheduleProcessor.create_do_not_disturb_timings_for(standard_schedule)
expect(user.do_not_disturb_timings.count).to eq(4)
end
travel_to Time.new(2020, 1, 5, 12, 0, 0, "+00:00") do
UserNotificationScheduleProcessor.create_do_not_disturb_timings_for(standard_schedule)
# There is 1 overlap, so expect only 3 more to be created
expect(user.do_not_disturb_timings.count).to eq(7)
end
travel_to Time.new(2020, 1, 10, 12, 0, 0, "+00:00") do
UserNotificationScheduleProcessor.create_do_not_disturb_timings_for(standard_schedule)
# There is no overlap, so expect only 4 more to be created
expect(user.do_not_disturb_timings.count).to eq(11)
end
end
it "extends previously scheduled dnd timings to remove gaps" do
user.user_option.update(timezone: "UTC")
travel_to Time.new(2020, 1, 4, 12, 0, 0, "+00:00") do
existing_timing =
user.do_not_disturb_timings.create(
scheduled: true,
starts_at: 1.day.ago,
ends_at: Time.new(2020, 1, 03, 11, 0, 0, "+00:00").end_of_day,
)
UserNotificationScheduleProcessor.create_do_not_disturb_timings_for(standard_schedule)
expect(existing_timing.reload.ends_at).to eq_time(Time.new(2020, 1, 4, 7, 59, 0, "+00:00"))
end
end
it "creates the correct timings when the whole schedule is DND (-1)" do
user.user_option.update(timezone: "UTC")
schedule = standard_schedule
schedule.update(
day_0_start_time: -1,
day_1_start_time: -1,
day_2_start_time: -1,
day_3_start_time: -1,
day_4_start_time: -1,
day_5_start_time: -1,
day_6_start_time: -1,
)
travel_to Time.new(2020, 1, 4, 12, 0, 0, "+00:00") do
UserNotificationScheduleProcessor.create_do_not_disturb_timings_for(schedule)
expect(user.do_not_disturb_timings.count).to eq(1)
expect(user.do_not_disturb_timings.first.starts_at).to eq_time(
Time.new(2020, 1, 4, 0, 0, 0, "+00:00"),
)
expect(user.do_not_disturb_timings.first.ends_at).to be_within(1.second).of Time.new(
2020,
1,
6,
23,
59,
59,
"+00:00",
)
end
end
it "creates the correct timings at the end of a month and year" do
user.user_option.update(timezone: "UTC")
schedule = standard_schedule
schedule.update(
day_3_start_time: -1, # December 31, 2020 was a thursday. testing more cases.
)
travel_to Time.new(2020, 12, 31, 12, 0, 0, "+00:00") do
UserNotificationScheduleProcessor.create_do_not_disturb_timings_for(schedule)
expect(user.do_not_disturb_timings[0].starts_at).to eq_time(
Time.new(2020, 12, 31, 0, 0, 0, "+00:00"),
)
expect(user.do_not_disturb_timings[0].ends_at).to eq_time(
Time.new(2021, 1, 1, 7, 59, 0, "+00:00"),
)
expect(user.do_not_disturb_timings[1].starts_at).to eq_time(
Time.new(2021, 1, 1, 17, 0, 0, "+00:00"),
)
expect(user.do_not_disturb_timings[1].ends_at).to eq_time(
Time.new(2021, 1, 2, 7, 59, 0, "+00:00"),
)
expect(user.do_not_disturb_timings[2].starts_at).to eq_time(
Time.new(2021, 1, 2, 17, 0, 0, "+00:00"),
)
expect(user.do_not_disturb_timings[2].ends_at).to be_within(1.second).of Time.new(
2021,
1,
2,
23,
59,
59,
"+00:00",
)
end
end
it "handles midnight to midnight for multiple days (no timings created)" do
user.user_option.update(timezone: "UTC")
schedule = standard_schedule
schedule.update(
day_0_start_time: 0,
day_0_end_time: 1440,
day_1_start_time: 0,
day_1_end_time: 1440,
day_2_start_time: 0,
day_2_end_time: 1440,
)
travel_to Time.new(2021, 1, 4, 12, 0, 0, "+00:00") do
UserNotificationScheduleProcessor.create_do_not_disturb_timings_for(schedule)
expect(user.do_not_disturb_timings.count).to eq(0)
end
end
it "publishes to message bus when the user should enter DND" do
user.user_option.update(timezone: "UTC")
schedule = standard_schedule
travel_to Time.new(2020, 12, 31, 1, 0, 0, "+00:00") do
messages =
MessageBus.track_publish("/do-not-disturb/#{user.id}") do
UserNotificationScheduleProcessor.create_do_not_disturb_timings_for(schedule)
end
expect(messages.size).to eq(1)
expect(messages[0].data[:ends_at]).to eq(
Time.new(2020, 12, 31, 7, 59, 0, "+00:00").httpdate,
)
expect(messages[0].user_ids).to contain_exactly(user.id)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# frozen_string_literal: true
# Consolidate notifications based on a threshold and a time window.
#
# If a consolidated notification already exists, we'll update it instead.
# If it doesn't and creating a new one would match the threshold, we delete existing ones and create a consolidated one.
# Otherwise, save the original one.
#
# Constructor arguments:
#
# - from: The notification type of the unconsolidated notification. e.g. `Notification.types[:private_message]`
# - to: The type the consolidated notification will have. You can use the same value as from to flatten notifications or bump existing ones.
# - threshold: If creating a new notification would match this number, we'll destroy existing ones and create a consolidated one. It also accepts a lambda that returns a number.
# - consolidation_window: Only consolidate notifications created since this value (Pass a ActiveSupport::Duration instance, and we'll call #ago on it).
# - unconsolidated_query_blk: A block with additional queries to apply when fetching for unconsolidated notifications.
# - consolidated_query_blk: A block with additional queries to apply when fetching for a consolidated notification.
#
# Need to call #set_precondition to configure this:
#
# - precondition_blk: A block that receives the mutated data and returns true if we have everything we need to consolidate.
#
# Need to call #set_mutations to configure this:
#
# - set_data_blk: A block that receives the notification data hash and mutates it, adding additional data needed for consolidation.
#
# Need to call #before_consolidation_callbacks to configure this:
#
# - before_update_blk: A block that is called before updating an already consolidated notification.
# Receives the consolidated object, the data hash, and the original notification.
#
# - before_consolidation_blk: A block that is called before creating a consolidated object.
# Receives an ActiveRecord::Relation with notifications about to be consolidated, and the new data hash.
#
module Notifications
  # Folds repeated `from`-type notifications into a single `to`-type
  # notification once the threshold is reached inside the consolidation
  # window. See the header comment above for the full constructor contract.
  class ConsolidateNotifications < ConsolidationPlan
    def initialize(
      from:,
      to:,
      consolidation_window: nil,
      unconsolidated_query_blk: nil,
      consolidated_query_blk: nil,
      threshold:
    )
      @from = from
      @to = to
      @threshold = threshold
      @consolidation_window = consolidation_window
      @consolidated_query_blk = consolidated_query_blk
      @unconsolidated_query_blk = unconsolidated_query_blk
      @precondition_blk = nil
      @set_data_blk = nil
      # Removed dead `@bump_notification = bump_notification`: there is no such
      # constructor argument, so the RHS resolved to the (also removed) private
      # reader and always assigned nil.
    end

    # Registers optional hooks fired before updating an existing consolidated
    # notification (before_update_blk) or creating a new one
    # (before_consolidation_blk). Returns self so configuration can be chained.
    def before_consolidation_callbacks(before_update_blk: nil, before_consolidation_blk: nil)
      @before_update_blk = before_update_blk
      @before_consolidation_blk = before_consolidation_blk
      self
    end

    # True when this plan can handle `notification`. As a side effect it
    # memoizes the mutated data hash in @data (built by
    # ConsolidationPlan#consolidated_data) for the later consolidate_or_save!.
    def can_consolidate_data?(notification)
      return false if get_threshold.zero? || to.blank?
      return false if notification.notification_type != from

      @data = consolidated_data(notification)

      return true if @precondition_blk.nil?

      @precondition_blk.call(data, notification)
    end

    # Updates an existing consolidated notification, or destroys the originals
    # and creates a consolidated one, or simply persists the original
    # notification. Returns nil when the plan does not apply.
    def consolidate_or_save!(notification)
      @data ||= consolidated_data(notification)
      return unless can_consolidate_data?(notification)

      update_consolidated_notification!(notification) ||
        create_consolidated_notification!(notification) || notification.tap(&:save!)
    end

    private

    # NOTE(review): :notification is never assigned in this class and always
    # reads as nil — confirm whether a subclass relies on it before removing.
    attr_reader(
      :notification,
      :from,
      :to,
      :data,
      :threshold,
      :consolidated_query_blk,
      :unconsolidated_query_blk,
      :consolidation_window,
    )

    # Bumps an already-consolidated notification with the merged data.
    # Returns nil when no consolidated notification exists yet.
    def update_consolidated_notification!(notification)
      notifications = user_notifications(notification, to)

      if consolidated_query_blk.present?
        notifications = consolidated_query_blk.call(notifications, data)
      end

      consolidated = notifications.first

      return if consolidated.blank?

      data_hash = consolidated.data_hash.merge(data)
      data_hash[:count] += 1 if data_hash[:count].present?

      @before_update_blk.call(consolidated, data_hash, notification) if @before_update_blk

      # Hack: We don't want to cache the old data if we're about to update it.
      consolidated.instance_variable_set(:@data_hash, nil)
      consolidated.update!(data: data_hash.to_json, read: false, updated_at: timestamp)

      consolidated
    end

    # Destroys the unconsolidated notifications and replaces them with a
    # single consolidated one. Returns nil while still under the threshold.
    def create_consolidated_notification!(notification)
      notifications = user_notifications(notification, from)

      if unconsolidated_query_blk.present?
        notifications = unconsolidated_query_blk.call(notifications, data)
      end

      # Saving the new notification would pass the threshold? Consolidate instead.
      count_after_saving_notification = notifications.count + 1
      return if count_after_saving_notification <= get_threshold

      # Keep the newest original's creation time on the consolidated record.
      timestamp = notifications.last.created_at
      data[:count] = count_after_saving_notification

      @before_consolidation_blk.call(notifications, data) if @before_consolidation_blk

      consolidated = nil

      Notification.transaction do
        notifications.destroy_all

        consolidated =
          Notification.create!(
            notification_type: to,
            user_id: notification.user_id,
            data: data.to_json,
            updated_at: timestamp,
            created_at: timestamp,
          )
      end

      consolidated
    end

    # Threshold may be a static number or a lambda re-evaluated on every call.
    def get_threshold
      threshold.is_a?(Proc) ? threshold.call : threshold
    end

    # Restricts the base query from ConsolidationPlan to the time window.
    def user_notifications(notification, type)
      notifications = super(notification, type)

      if consolidation_window.present?
        notifications = notifications.where("created_at > ?", consolidation_window.ago)
      end

      notifications
    end

    def timestamp
      @timestamp ||= Time.zone.now
    end
  end
end
``` | # frozen_string_literal: true
RSpec.describe Notifications::ConsolidateNotifications do
  describe "#before_consolidation_callbacks" do
    fab!(:user)

    # A liked -> liked plan with threshold 1: the second like triggers
    # consolidation, marked in the data hash via set_data_blk.
    let(:rule) do
      described_class.new(
        from: Notification.types[:liked],
        to: Notification.types[:liked],
        consolidation_window: 10.minutes,
        consolidated_query_blk:
          Proc.new { |notifications| notifications.where("(data::json ->> 'consolidated')::bool") },
        threshold: 1,
      ).set_mutations(set_data_blk: Proc.new { |n| n.data_hash.merge(consolidated: true) })
    end

    it "applies a callback when consolidating a notification" do
      rule.before_consolidation_callbacks(
        before_consolidation_blk:
          Proc.new { |_, data| data[:consolidation_callback_called] = true },
      )

      # First call saves the like; second call crosses the threshold and
      # consolidates, which must run the before_consolidation callback.
      rule.consolidate_or_save!(build_like_notification)
      rule.consolidate_or_save!(build_like_notification)

      consolidated_notification = Notification.where(user: user).last

      expect(consolidated_notification.data_hash[:consolidation_callback_called]).to eq(true)
    end

    it "applies a callback when updating a consolidated notification" do
      rule.before_consolidation_callbacks(
        before_update_blk: Proc.new { |_, data| data[:update_callback_called] = true },
      )

      rule.consolidate_or_save!(build_like_notification)
      rule.consolidate_or_save!(build_like_notification)

      consolidated_notification = Notification.where(user: user).last

      # The callback fires only when updating an existing consolidated
      # notification, not on the initial consolidation.
      expect(consolidated_notification.data_hash[:update_callback_called]).to be_nil

      rule.consolidate_or_save!(build_like_notification)

      consolidated_notification = Notification.where(user: user).last

      expect(consolidated_notification.data_hash[:update_callback_called]).to eq(true)
    end

    # Unsaved like notification for `user` with empty data.
    def build_like_notification
      Fabricate.build(
        :notification,
        user: user,
        notification_type: Notification.types[:liked],
        data: {}.to_json,
      )
    end
  end
end
|
Write an RSpec test file for the following Ruby class.
```ruby
# frozen_string_literal: true
module Notifications
  # Chooses the first consolidation plan able to handle a freshly created
  # notification and delegates the save/consolidate decision to it.
  class ConsolidationPlanner
    # Returns :no_plan when no plan applies; otherwise the chosen plan's result.
    def consolidate_or_save!(notification)
      plan = plan_for(notification)
      return :no_plan if plan.nil?

      plan.consolidate_or_save!(notification)
    end

    private

    # Order matters: the first plan whose can_consolidate_data? returns true
    # wins, so liked_by_two_users is checked before the generic liked plan.
    # Plugin-registered plans are appended after the core ones.
    def plan_for(notification)
      consolidation_plans = [
        liked_by_two_users,
        liked,
        group_message_summary,
        group_membership,
        new_features_notification,
      ]
      consolidation_plans.concat(DiscoursePluginRegistry.notification_consolidation_plans)

      consolidation_plans.detect { |plan| plan.can_consolidate_data?(notification) }
    end

    # Folds repeated like notifications into one liked_consolidated
    # notification once the site-wide threshold is passed within the window.
    def liked
      ConsolidateNotifications
        .new(
          from: Notification.types[:liked],
          to: Notification.types[:liked_consolidated],
          threshold: -> { SiteSetting.notification_consolidation_threshold },
          consolidation_window: SiteSetting.likes_notification_consolidation_window_mins.minutes,
          unconsolidated_query_blk:
            Proc.new do |notifications, data|
              key = "display_username"
              value = data[key.to_sym]
              # Skip likes already carrying a second liker (username2 set);
              # those belong to the liked_by_two_users plan.
              filtered = notifications.where("data::json ->> 'username2' IS NULL")
              filtered = filtered.where("data::json ->> '#{key}' = ?", value) if value
              filtered
            end,
          consolidated_query_blk: filtered_by_data_attribute("display_username"),
        )
        .set_mutations(
          set_data_blk:
            Proc.new do |notification|
              data = notification.data_hash
              data.merge(username: data[:display_username])
            end,
        )
        .set_precondition(precondition_blk: Proc.new { |data| data[:username2].blank? })
    end

    # When the user's like notification frequency is :always, merges the
    # previous like notification on the same post into a two-user one.
    def liked_by_two_users
      DeletePreviousNotifications
        .new(
          type: Notification.types[:liked],
          previous_query_blk:
            Proc.new do |notifications, data|
              notifications.where(id: data[:previous_notification_id])
            end,
        )
        .set_mutations(
          set_data_blk:
            Proc.new do |notification|
              # Look for a recent like on the same topic/post to merge with.
              existing_notification_of_same_type =
                Notification
                  .where(user: notification.user)
                  .order("notifications.id DESC")
                  .where(topic_id: notification.topic_id, post_number: notification.post_number)
                  .where(notification_type: notification.notification_type)
                  .where("created_at > ?", 1.day.ago)
                  .first

              data = notification.data_hash
              if existing_notification_of_same_type
                same_type_data = existing_notification_of_same_type.data_hash
                data.merge(
                  previous_notification_id: existing_notification_of_same_type.id,
                  username2: same_type_data[:display_username],
                  count: (same_type_data[:count] || 1).to_i + 1,
                )
              else
                data
              end
            end,
        )
        .set_precondition(
          precondition_blk:
            Proc.new do |data, notification|
              always_freq = UserOption.like_notification_frequency_type[:always]

              notification.user&.user_option&.like_notification_frequency == always_freq &&
                data[:previous_notification_id].present?
            end,
        )
    end

    # Collapses group membership request PMs into one consolidated
    # notification per group.
    def group_membership
      ConsolidateNotifications
        .new(
          from: Notification.types[:private_message],
          to: Notification.types[:membership_request_consolidated],
          threshold: -> { SiteSetting.notification_consolidation_threshold },
          consolidation_window: Notification::MEMBERSHIP_REQUEST_CONSOLIDATION_WINDOW_HOURS.hours,
          unconsolidated_query_blk: filtered_by_data_attribute("topic_title"),
          consolidated_query_blk: filtered_by_data_attribute("group_name"),
        )
        .set_precondition(precondition_blk: Proc.new { |data| data[:group_name].present? })
        .set_mutations(
          set_data_blk:
            Proc.new do |notification|
              data = notification.data_hash
              post_id = data[:original_post_id]
              # Resolve the requested group through the post's custom field.
              custom_field =
                PostCustomField.select(:value).find_by(post_id: post_id, name: "requested_group_id")
              group_id = custom_field&.value
              group_name =
                group_id.present? ? Group.select(:name).find_by(id: group_id.to_i)&.name : nil

              data[:group_name] = group_name
              data
            end,
        )
    end

    # Keeps only the latest group message summary per group.
    def group_message_summary
      DeletePreviousNotifications.new(
        type: Notification.types[:group_message_summary],
        previous_query_blk: filtered_by_data_attribute("group_id"),
      ).set_precondition(precondition_blk: Proc.new { |data| data[:group_id].present? })
    end

    # Builds a query block matching a JSON data attribute against the mutated
    # data hash. attribute_name is interpolated into SQL, so it must only ever
    # be a hard-coded identifier (as it is at every call site above).
    def filtered_by_data_attribute(attribute_name)
      Proc.new do |notifications, data|
        if (value = data[attribute_name.to_sym])
          notifications.where("data::json ->> '#{attribute_name}' = ?", value.to_s)
        else
          notifications
        end
      end
    end

    # Keeps only the latest new-features notification.
    def new_features_notification
      DeletePreviousNotifications.new(type: Notification.types[:new_features])
    end
  end
end
``` | # frozen_string_literal: true
RSpec.describe Notifications::ConsolidationPlanner do
  subject(:planner) { described_class.new }

  describe "#consolidate_or_save!" do
    let(:threshold) { 1 }

    fab!(:user)
    let(:like_user) { "user1" }

    before { SiteSetting.notification_consolidation_threshold = threshold }

    it "saves the notification when it hasn't passed the consolidation threshold yet" do
      notification = build_notification(:liked, { display_username: like_user })

      saved_like = planner.consolidate_or_save!(notification)

      expect(saved_like.id).to be_present
      expect(saved_like.notification_type).to eq(Notification.types[:liked])
    end

    it "consolidates multiple notifications into a new one" do
      first_notification =
        Fabricate(
          :notification,
          user: user,
          notification_type: Notification.types[:liked],
          data: { display_username: like_user }.to_json,
        )
      notification = build_notification(:liked, { display_username: like_user })

      consolidated_like = planner.consolidate_or_save!(notification)

      expect(consolidated_like.id).not_to eq(first_notification.id)
      expect(consolidated_like.notification_type).to eq(Notification.types[:liked_consolidated])
      data = JSON.parse(consolidated_like.data)
      expect(data["count"]).to eq(threshold + 1)
    end

    it "updates the notification if we already consolidated it" do
      count = 5
      Fabricate(
        :notification,
        user: user,
        notification_type: Notification.types[:liked_consolidated],
        data: { count: count, display_username: like_user }.to_json,
      )
      notification = build_notification(:liked, { display_username: like_user })

      updated = planner.consolidate_or_save!(notification)

      # The unsaved original is never persisted, so reloading it must fail.
      expect { notification.reload }.to raise_error(ActiveRecord::RecordNotFound)
      data = JSON.parse(updated.data)
      expect(data["count"]).to eq(count + 1)
    end
  end

  # Unsaved notification for `user` with the given type and data payload.
  def build_notification(type_sym, data)
    Fabricate.build(
      :notification,
      user: user,
      notification_type: Notification.types[type_sym],
      data: data.to_json,
    )
  end

  # Removed the dead `plan_for` helper: it invoked the service's *private*
  # `plan_for` method (NoMethodError if ever called) and had no callers.
end
|
Write an RSpec test file for the following Ruby class.
```ruby
class SearchService
  pattr_initialize [:current_user!, :current_account!, :params!, :search_type!]

  # Runs the search scoped to the current account and the inboxes the user is
  # a member of. Returns a hash keyed by result type; any unrecognized
  # search_type falls back to returning all three result sets.
  def perform
    case search_type
    when 'Message'
      { messages: filter_messages }
    when 'Conversation'
      { conversations: filter_conversations }
    when 'Contact'
      { contacts: filter_contacts }
    else
      { contacts: filter_contacts, messages: filter_messages, conversations: filter_conversations }
    end
  end

  private

  # Inbox ids the current user can see; results outside these are excluded.
  def accessable_inbox_ids
    @accessable_inbox_ids ||= @current_user.assigned_inboxes.pluck(:id)
  end

  # Raw query string, stripped; interpolated only as a bound ILIKE parameter.
  def search_query
    @search_query ||= params[:q].to_s.strip
  end

  # Matches on the conversation display id or the associated contact's
  # identifying fields; newest conversations first, capped at 10.
  def filter_conversations
    @conversations = current_account.conversations.where(inbox_id: accessable_inbox_ids)
                                    .joins('INNER JOIN contacts ON conversations.contact_id = contacts.id')
                                    .where("cast(conversations.display_id as text) ILIKE :search OR contacts.name ILIKE :search OR contacts.email
                                      ILIKE :search OR contacts.phone_number ILIKE :search OR contacts.identifier ILIKE :search", search: "%#{search_query}%")
                                    .order('conversations.created_at DESC')
                                    .limit(10)
  end

  # ILIKE over message content, restricted to the last 3 months to keep the
  # query cheap; newest first, capped at 10.
  def filter_messages
    @messages = current_account.messages.where(inbox_id: accessable_inbox_ids)
                               .where('messages.content ILIKE :search', search: "%#{search_query}%")
                               .where('created_at >= ?', 3.months.ago)
                               .reorder('created_at DESC')
                               .limit(10)
  end

  # Resolved contacts only, matched on name/email/phone/identifier, ordered by
  # most recent activity, capped at 10.
  def filter_contacts
    @contacts = current_account.contacts.where(
      "name ILIKE :search OR email ILIKE :search OR phone_number
      ILIKE :search OR identifier ILIKE :search", search: "%#{search_query}%"
    ).resolved_contacts.order_on_last_activity_at('desc').limit(10)
  end
end
``` | require 'rails_helper'
describe SearchService do
  # NOTE(review): this subject is shadowed by the local `search` built inside
  # every example, so it is never evaluated.
  subject(:search) { described_class.new(current_user: user, current_account: account, params: params, search_type: search_type) }

  let(:search_type) { 'all' }
  let!(:account) { create(:account) }
  let!(:user) { create(:user, account: account) }
  let!(:inbox) { create(:inbox, account: account, enable_auto_assignment: false) }
  let!(:harry) { create(:contact, name: 'Harry Potter', email: '[email protected]', account_id: account.id) }
  let!(:conversation) { create(:conversation, contact: harry, inbox: inbox, account: account) }
  let!(:message) { create(:message, account: account, inbox: inbox, content: 'Harry Potter is a wizard') }

  before do
    # The user must be an inbox member for results to be visible.
    create(:inbox_member, user: user, inbox: inbox)
    Current.account = account
  end

  after do
    Current.account = nil
  end

  describe '#perform' do
    context 'when search types' do
      let(:params) { { q: 'Potter' } }

      it 'returns all for all' do
        search_type = 'all'
        search = described_class.new(current_user: user, current_account: account, params: params, search_type: search_type)
        expect(search.perform.keys).to match_array(%i[contacts messages conversations])
      end

      it 'returns contacts for contacts' do
        search_type = 'Contact'
        search = described_class.new(current_user: user, current_account: account, params: params, search_type: search_type)
        expect(search.perform.keys).to match_array(%i[contacts])
      end

      it 'returns messages for messages' do
        search_type = 'Message'
        search = described_class.new(current_user: user, current_account: account, params: params, search_type: search_type)
        expect(search.perform.keys).to match_array(%i[messages])
      end

      it 'returns conversations for conversations' do
        search_type = 'Conversation'
        search = described_class.new(current_user: user, current_account: account, params: params, search_type: search_type)
        expect(search.perform.keys).to match_array(%i[conversations])
      end
    end

    context 'when contact search' do
      it 'searches across name, email, phone_number and identifier and returns in the order of contact last_activity_at' do
        # random contact
        create(:contact, account_id: account.id)
        # unresolved contact -> no identifying info
        # will not appear in search results
        create(:contact, name: 'Harry Potter', account_id: account.id)
        harry2 = create(:contact, email: '[email protected]', account_id: account.id, last_activity_at: 2.days.ago)
        harry3 = create(:contact, identifier: 'Potter123', account_id: account.id, last_activity_at: 1.day.ago)
        harry4 = create(:contact, identifier: 'Potter1235', account_id: account.id, last_activity_at: 2.minutes.ago)
        params = { q: 'Potter ' }
        search = described_class.new(current_user: user, current_account: account, params: params, search_type: 'Contact')
        expect(search.perform[:contacts].map(&:id)).to eq([harry4.id, harry3.id, harry2.id, harry.id])
      end
    end

    context 'when message search' do
      it 'searches across message content and return in created_at desc' do
        # random messages in another account
        create(:message, content: 'Harry Potter is a wizard')
        # random message in an inbox without access
        create(:message, account: account, inbox: create(:inbox, account: account), content: 'Harry Potter is a wizard')
        message2 = create(:message, account: account, inbox: inbox, content: 'harry is cool')
        params = { q: 'Harry' }
        search = described_class.new(current_user: user, current_account: account, params: params, search_type: 'Message')
        expect(search.perform[:messages].map(&:id)).to eq([message2.id, message.id])
      end
    end

    context 'when conversation search' do
      it 'searches across conversations using contact information and order by created_at desc' do
        # random messages in another inbox
        random = create(:contact, account_id: account.id)
        create(:conversation, contact: random, inbox: inbox, account: account)
        conv2 = create(:conversation, contact: harry, inbox: inbox, account: account)
        params = { q: 'Harry' }
        search = described_class.new(current_user: user, current_account: account, params: params, search_type: 'Conversation')
        expect(search.perform[:conversations].map(&:id)).to eq([conv2.id, conversation.id])
      end

      it 'searches across conversations with display id' do
        random = create(:contact, account_id: account.id, name: 'random', email: '[email protected]', identifier: 'random')
        new_converstion = create(:conversation, contact: random, inbox: inbox, account: account)
        params = { q: new_converstion.display_id }
        search = described_class.new(current_user: user, current_account: account, params: params, search_type: 'Conversation')
        expect(search.perform[:conversations].map(&:id)).to include new_converstion.id
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class.
```ruby
# Applies automation-rule actions (status changes, assignment, labels,
# transcripts) to a single conversation. Each public action takes the raw
# params array produced by the automation rule definition.
class ActionService
  include EmailHelper

  def initialize(conversation)
    @conversation = conversation.reload
    # Bug fix: @account was read by assign_agent / assign_team /
    # agent_belongs_to_inbox? / team_belongs_to_account? but never assigned,
    # raising NoMethodError on nil for those actions.
    @account = @conversation.account
  end

  def mute_conversation(_params)
    @conversation.mute!
  end

  def snooze_conversation(_params)
    @conversation.snoozed!
  end

  def resolve_conversation(_params)
    @conversation.resolved!
  end

  # params: [status] — first element is the new status.
  def change_status(status)
    @conversation.update!(status: status[0])
  end

  # params: [priority] — the literal string 'nil' clears the priority.
  def change_priority(priority)
    @conversation.update!(priority: (priority[0] == 'nil' ? nil : priority[0]))
  end

  def add_label(labels)
    return if labels.empty?

    @conversation.reload.add_labels(labels)
  end

  # params: [agent_id] — 'nil' unassigns; otherwise the agent must be an inbox
  # member or an account administrator.
  def assign_agent(agent_ids = [])
    return @conversation.update!(assignee_id: nil) if agent_ids[0] == 'nil'
    return unless agent_belongs_to_inbox?(agent_ids)

    @agent = @account.users.find_by(id: agent_ids)
    @conversation.update!(assignee_id: @agent.id) if @agent.present?
  end

  def remove_label(labels)
    return if labels.empty?

    labels = @conversation.label_list - labels
    @conversation.update(label_list: labels)
  end

  # params: [team_id] — 0 unassigns; otherwise the team must belong to the
  # conversation's account.
  def assign_team(team_ids = [])
    return unassign_team if team_ids[0].zero?
    return unless team_belongs_to_account?(team_ids)

    @conversation.update!(team_id: team_ids[0])
  end

  def remove_assigned_team(_params)
    @conversation.update!(team_id: nil)
  end

  # params: [emails] — a single comma-separated string of recipients.
  def send_email_transcript(emails)
    emails = emails[0].gsub(/\s+/, '').split(',')

    emails.each do |email|
      email = parse_email_variables(@conversation, email)
      ConversationReplyMailer.with(account: @conversation.account).conversation_transcript(@conversation, email)&.deliver_later
    end
  end

  private

  # True when the first agent id is an inbox member or an account admin.
  def agent_belongs_to_inbox?(agent_ids)
    member_ids = @conversation.inbox.members.pluck(:user_id)
    assignable_agent_ids = member_ids + @account.administrators.ids

    assignable_agent_ids.include?(agent_ids[0])
  end

  def team_belongs_to_account?(team_ids)
    @account.team_ids.include?(team_ids[0])
  end

  def conversation_a_tweet?
    return false if @conversation.additional_attributes.blank?

    @conversation.additional_attributes['type'] == 'tweet'
  end
end
``` | require 'rails_helper'
describe ActionService do
  let(:account) { create(:account) }

  describe '#resolve_conversation' do
    let(:conversation) { create(:conversation) }
    let(:action_service) { described_class.new(conversation) }

    it 'resolves the conversation' do
      # Conversations start out open by default.
      expect(conversation.status).to eq('open')
      action_service.resolve_conversation(nil)
      expect(conversation.reload.status).to eq('resolved')
    end
  end

  describe '#change_priority' do
    let(:conversation) { create(:conversation) }
    let(:action_service) { described_class.new(conversation) }

    it 'changes the priority of the conversation to medium' do
      action_service.change_priority(['medium'])
      expect(conversation.reload.priority).to eq('medium')
    end

    it 'changes the priority of the conversation to nil' do
      # The literal string 'nil' clears the priority.
      action_service.change_priority(['nil'])
      expect(conversation.reload.priority).to be_nil
    end
  end

  describe '#assign_agent' do
    let(:agent) { create(:user, account: account, role: :agent) }
    let(:conversation) { create(:conversation, account: account) }
    let(:inbox_member) { create(:inbox_member, inbox: conversation.inbox, user: agent) }
    let(:action_service) { described_class.new(conversation) }

    it 'unassigns the conversation if agent id is nil' do
      action_service.assign_agent(['nil'])
      expect(conversation.reload.assignee).to be_nil
    end
  end
end
|
Write an RSpec test file for the following Ruby class.
```ruby
# Ensures the Twitter channel for an inbox has our webhook registered and an
# active subscription. Twitter allows a single webhook URL per environment.
class Twitter::WebhookSubscribeService
  include Rails.application.routes.url_helpers

  pattr_initialize [:inbox_id]

  # Returns true on success; raises when the subscription cannot be created.
  def perform
    ensure_webhook
    create_subscription unless subscription?
    true
  end

  private

  delegate :channel, to: :inbox
  delegate :twitter_client, to: :channel

  def inbox
    Inbox.find(inbox_id)
  end

  # Our publicly reachable webhook endpoint.
  def twitter_url
    webhooks_twitter_url(protocol: 'https')
  end

  # Leaves a matching registration alone; otherwise replaces whatever is
  # currently registered with our URL.
  def ensure_webhook
    current = fetch_webhooks&.first
    return true if current.try(:[], 'url') == twitter_url

    # twitter supports only one webhook url per environment
    # so we will delete the existing one if its not chatwoot
    unregister_webhook(current) if current
    register_webhook
  end

  def create_subscription
    response = twitter_client.create_subscription
    raise StandardError, 'Twitter Subscription Failed' if response.status != '204'
  end

  def unregister_webhook(webhook)
    response = twitter_client.unregister_webhook(id: webhook.try(:[], 'id'))
    Rails.logger.info "TWITTER_UNREGISTER_WEBHOOK: #{response.body}"
  end

  def register_webhook
    response = twitter_client.register_webhook(url: twitter_url)
    Rails.logger.info "TWITTER_REGISTER_WEBHOOK: #{response.body}"
  end

  # Twitter answers 204 when a subscription already exists.
  def subscription?
    twitter_client.fetch_subscriptions.status == '204'
  end

  def fetch_webhooks
    twitter_client.fetch_webhooks.body
  end
end
``` | require 'rails_helper'
describe Twitter::WebhookSubscribeService do
  subject(:webhook_subscribe_service) { described_class.new(inbox_id: twitter_inbox.id) }

  let(:twitter_client) { instance_double(Twitty::Facade) }
  let(:twitter_success_response) { instance_double(Twitty::Response, status: '200', body: { message: 'Valid' }) }
  # NOTE(review): twitter_error_response is currently unused by any example.
  let(:twitter_error_response) { instance_double(Twitty::Response, status: '422', body: { message: 'Invalid request' }) }
  let(:account) { create(:account) }
  let(:twitter_channel) { create(:channel_twitter_profile, account: account) }
  let(:twitter_inbox) { create(:inbox, channel: twitter_channel, account: account) }

  before do
    allow(Twitty::Facade).to receive(:new).and_return(twitter_client)
    allow(twitter_client).to receive(:register_webhook).and_return(twitter_success_response)
    allow(twitter_client).to receive(:unregister_webhook).and_return(twitter_success_response)
    allow(twitter_client).to receive(:fetch_subscriptions).and_return(instance_double(Twitty::Response, status: '204', body: { message: 'Valid' }))
    allow(twitter_client).to receive(:create_subscription).and_return(instance_double(Twitty::Response, status: '204', body: { message: 'Valid' }))
  end

  describe '#perform' do
    context 'when webhook is not registered' do
      it 'calls register_webhook' do
        allow(twitter_client).to receive(:fetch_webhooks).and_return(
          instance_double(Twitty::Response, status: '200', body: {})
        )

        webhook_subscribe_service.perform

        expect(twitter_client).not_to have_received(:unregister_webhook)
        expect(twitter_client).to have_received(:register_webhook)
      end
    end

    context 'when valid webhook is registered' do
      # Fixed description: the assertions verify that nothing is
      # re-registered when the existing webhook already matches our URL.
      it 'does not unregister or register the webhook again' do
        allow(twitter_client).to receive(:fetch_webhooks).and_return(
          instance_double(Twitty::Response, status: '200',
                                            body: [{ 'url' => webhook_subscribe_service.send(:twitter_url) }])
        )

        webhook_subscribe_service.perform

        expect(twitter_client).not_to have_received(:unregister_webhook)
        expect(twitter_client).not_to have_received(:register_webhook)
      end
    end

    context 'when invalid webhook is registered' do
      it 'calls unregister_webhook and then register webhook' do
        allow(twitter_client).to receive(:fetch_webhooks).and_return(
          instance_double(Twitty::Response, status: '200',
                                            body: [{ 'url' => 'invalid_url' }])
        )

        webhook_subscribe_service.perform

        expect(twitter_client).to have_received(:unregister_webhook)
        expect(twitter_client).to have_received(:register_webhook)
      end
    end

    context 'when correct webhook is present' do
      it 'calls create subscription if subscription is not present' do
        allow(twitter_client).to receive(:fetch_webhooks).and_return(
          instance_double(Twitty::Response, status: '200',
                                            body: [{ 'url' => webhook_subscribe_service.send(:twitter_url) }])
        )
        allow(twitter_client).to receive(:fetch_subscriptions).and_return(instance_double(Twitty::Response, status: '500'))

        webhook_subscribe_service.perform

        expect(twitter_client).to have_received(:create_subscription)
      end

      it 'does not call create subscription if subscription is already present' do
        allow(twitter_client).to receive(:fetch_webhooks).and_return(
          instance_double(Twitty::Response, status: '200',
                                            body: [{ 'url' => webhook_subscribe_service.send(:twitter_url) }])
        )

        webhook_subscribe_service.perform

        expect(twitter_client).not_to have_received(:create_subscription)
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class.
```ruby
# Delivers an outgoing Chatwoot message to Twitter, either as a public tweet
# reply or as a direct message depending on the conversation type. The base
# class decides whether perform_reply should run at all.
class Twitter::SendOnTwitterService < Base::SendOnChannelService
  pattr_initialize [:message!]

  private

  delegate :additional_attributes, to: :contact

  def channel_class
    Channel::TwitterProfile
  end

  # Tweet conversations get a public reply; everything else goes out as a DM.
  def perform_reply
    if conversation_type == 'tweet'
      send_tweet_reply
    else
      send_direct_message
    end
  end

  # Client configured with the app consumer keys and this channel's tokens.
  def twitter_client
    Twitty::Facade.new do |config|
      config.consumer_key = ENV.fetch('TWITTER_CONSUMER_KEY', nil)
      config.consumer_secret = ENV.fetch('TWITTER_CONSUMER_SECRET', nil)
      config.access_token = channel.twitter_access_token
      config.access_token_secret = channel.twitter_access_token_secret
      config.base_url = 'https://api.twitter.com'
      config.environment = ENV.fetch('TWITTER_ENVIRONMENT', '')
    end
  end

  def conversation_type
    conversation.additional_attributes['type']
  end

  # Handle to mention in the tweet: the inbox itself when threading under our
  # own outgoing tweet, otherwise the original sender's screen name.
  def screen_name
    if reply_to_message.outgoing?
      "@#{reply_to_message.inbox.name}"
    else
      "@#{reply_to_message.sender&.additional_attributes.try(:[], 'screen_name') || ''}"
    end
  end

  def send_direct_message
    twitter_client.send_direct_message(
      recipient_id: contact_inbox.source_id,
      message: message.content
    )
  end

  # The tweet we thread under: the explicit in_reply_to target when set,
  # otherwise the latest incoming message of the conversation.
  def reply_to_message
    @reply_to_message ||=
      if message.in_reply_to
        conversation.messages.find(message.in_reply_to)
      else
        conversation.messages.incoming.last
      end
  end

  def send_tweet_reply
    response = twitter_client.send_tweet_reply(
      reply_to_tweet_id: reply_to_message.source_id,
      tweet: "#{screen_name} #{message.content}"
    )

    if response.status != '200'
      Rails.logger.error "TWITTER_TWEET_REPLY_ERROR #{response.body}"
      return
    end

    # Remember the created tweet id so future replies can thread correctly.
    message.update!(source_id: response.body['id_str'])
  end
end
``` | require 'rails_helper'
describe Twitter::SendOnTwitterService do
  subject(:send_reply_service) { described_class.new(message: message) }

  let(:twitter_client) { instance_double(Twitty::Facade) }
  let(:twitter_response) { instance_double(Twitty::Response) }
  let(:account) { create(:account) }
  # A non-twitter inbox to verify the channel guard in the base class.
  let(:widget_inbox) { create(:inbox, account: account) }
  let(:twitter_channel) { create(:channel_twitter_profile, account: account) }
  let(:twitter_inbox) { create(:inbox, channel: twitter_channel, account: account) }
  let(:contact) { create(:contact, account: account, additional_attributes: { screen_name: 'test_user' }) }
  let(:contact_inbox) { create(:contact_inbox, contact: contact, inbox: twitter_inbox) }
  let(:dm_conversation) do
    create(
      :conversation,
      contact: contact,
      inbox: twitter_inbox,
      contact_inbox: contact_inbox,
      additional_attributes: { type: 'direct_message' }
    )
  end
  let(:tweet_conversation) do
    create(
      :conversation,
      contact: contact,
      inbox: twitter_inbox,
      contact_inbox: contact_inbox,
      additional_attributes: { type: 'tweet', tweet_id: '1234' }
    )
  end

  before do
    allow(Twitty::Facade).to receive(:new).and_return(twitter_client)
    allow(twitter_client).to receive(:send_direct_message).and_return(true)
    allow(twitter_client).to receive(:send_tweet_reply).and_return(twitter_response)
    allow(twitter_response).to receive(:status).and_return('200')
    allow(twitter_response).to receive(:body).and_return(JSON.parse({ id_str: '12345' }.to_json))
  end

  describe '#perform' do
    context 'without reply' do
      it 'if inbox channel is not twitter profile' do
        message = create(:message, message_type: 'outgoing', inbox: widget_inbox, account: account)
        expect { described_class.new(message: message).perform }.to raise_error 'Invalid channel service was called'
        expect(twitter_client).not_to have_received(:send_direct_message)
      end

      it 'if message is private' do
        message = create(:message, message_type: 'outgoing', private: true, inbox: twitter_inbox, account: account)
        described_class.new(message: message).perform
        expect(twitter_client).not_to have_received(:send_direct_message)
      end

      it 'if message has source_id' do
        # A source_id means the message already exists on Twitter.
        message = create(:message, message_type: 'outgoing', source_id: '123', inbox: twitter_inbox, account: account)
        described_class.new(message: message).perform
        expect(twitter_client).not_to have_received(:send_direct_message)
      end

      it 'if message is not outgoing' do
        message = create(:message, message_type: 'incoming', inbox: twitter_inbox, account: account)
        described_class.new(message: message).perform
        expect(twitter_client).not_to have_received(:send_direct_message)
      end
    end

    context 'with reply' do
      it 'if conversation is a direct message' do
        create(:message, message_type: :incoming, inbox: twitter_inbox, account: account, conversation: dm_conversation)
        message = create(:message, message_type: :outgoing, inbox: twitter_inbox, account: account, conversation: dm_conversation)
        described_class.new(message: message).perform
        expect(twitter_client).to have_received(:send_direct_message)
      end

      context 'when conversation is a tweet' do
        it 'creates a response with correct reply if reply to message is incoming' do
          create(
            :message,
            message_type: :incoming,
            sender: contact,
            source_id: 'test-source-id-1',
            inbox: twitter_inbox,
            account: account,
            conversation: tweet_conversation
          )
          message = create(:message, message_type: :outgoing, inbox: twitter_inbox, account: account, conversation: tweet_conversation)
          described_class.new(message: message).perform
          # Incoming reply target -> mention the sender's screen name.
          expect(twitter_client).to have_received(:send_tweet_reply).with(
            reply_to_tweet_id: 'test-source-id-1',
            tweet: "@test_user #{message.content}"
          )
          expect(message.reload.source_id).to eq '12345'
        end

        it 'creates a response with correct reply if reply to message is outgoing' do
          outgoing_message = create(
            :message,
            message_type: :outgoing,
            source_id: 'test-source-id-1',
            inbox: twitter_inbox,
            account: account,
            conversation: tweet_conversation
          )
          reply_message = create(
            :message,
            message_type: :outgoing,
            inbox: twitter_inbox,
            account: account,
            conversation: tweet_conversation,
            in_reply_to: outgoing_message.id
          )
          described_class.new(message: reply_message).perform
          # Outgoing reply target -> mention the inbox's own handle.
          expect(twitter_client).to have_received(:send_tweet_reply).with(
            reply_to_tweet_id: 'test-source-id-1',
            tweet: "@#{twitter_inbox.name} #{reply_message.content}"
          )
          expect(reply_message.reload.source_id).to eq '12345'
        end
      end
    end
  end
end
|
Write an RSpec test file for the following Ruby class.
```ruby
# Round-robin agent picker backed by a Redis list per inbox. Agents are
# rotated to the head of the list whenever they receive an assignment.
class AutoAssignment::InboxRoundRobinService
  pattr_initialize [:inbox!]

  # Drops the whole queue; called on inbox delete.
  def clear_queue
    ::Redis::Alfred.delete(round_robin_key)
  end

  # Pushes an agent to the head of the queue; called on inbox member create.
  def add_agent_to_queue(user_id)
    ::Redis::Alfred.lpush(round_robin_key, user_id)
  end

  # Called on inbox member delete.
  def remove_agent_from_queue(user_id)
    ::Redis::Alfred.lrem(round_robin_key, user_id)
  end

  # Rebuilds the queue from the current inbox membership.
  def reset_queue
    clear_queue
    add_agent_to_queue(inbox.inbox_members.map(&:user_id))
  end

  # end of queue management functions

  # allowed_agent_ids: assignable online agents supplied by the assignment
  # service; values must be strings since they are matched against raw Redis
  # entries. Returns the picked User, or nil when nobody is available.
  def available_agent(allowed_agent_ids: [])
    reset_queue unless validate_queue?
    picked_id = get_member_from_allowed_agent_ids(allowed_agent_ids)
    return if picked_id.blank?

    inbox.inbox_members.find_by(user_id: picked_id)&.user
  end

  private

  # Picks the least-recently assigned allowed agent and rotates them back to
  # the head of the queue.
  def get_member_from_allowed_agent_ids(allowed_agent_ids)
    return if allowed_agent_ids.blank?

    picked_id = queue.intersection(allowed_agent_ids).pop
    pop_push_to_queue(picked_id)
    picked_id
  end

  # Moves the given agent to the head of the queue.
  def pop_push_to_queue(user_id)
    return if user_id.blank?

    remove_agent_from_queue(user_id)
    add_agent_to_queue(user_id)
  end

  # The queue is valid only when it holds exactly the current member ids.
  def validate_queue?
    true if inbox.inbox_members.map(&:user_id).sort == queue.map(&:to_i).sort
  end

  # Raw queue contents (strings), head first.
  def queue
    ::Redis::Alfred.lrange(round_robin_key)
  end

  def round_robin_key
    format(::Redis::Alfred::ROUND_ROBIN_AGENTS, inbox_id: inbox.id)
  end
end
``` | require 'rails_helper'
# Specs for AutoAssignment::InboxRoundRobinService: queue construction,
# self-healing validation, and round robin rotation over allowed agent ids.
describe AutoAssignment::InboxRoundRobinService do
subject(:inbox_round_robin_service) { described_class.new(inbox: inbox) }
let!(:account) { create(:account) }
let!(:inbox) { create(:inbox, account: account) }
let!(:inbox_members) { create_list(:inbox_member, 5, inbox: inbox) }
describe '#available_agent' do
it 'returns nil if allowed_agent_ids is not passed or empty' do
expect(described_class.new(inbox: inbox).available_agent).to be_nil
end
it 'gets the first available agent in allowed_agent_ids and move agent to end of the list' do
# Picked agents are rotated to the head of the Redis list (lpush).
expected_queue = [inbox_members[0].user_id, inbox_members[4].user_id, inbox_members[3].user_id, inbox_members[2].user_id,
inbox_members[1].user_id].map(&:to_s)
described_class.new(inbox: inbox).available_agent(allowed_agent_ids: [inbox_members[0].user_id, inbox_members[4].user_id].map(&:to_s))
expect(inbox_round_robin_service.send(:queue)).to eq(expected_queue)
end
it 'constructs round_robin_queue if queue is not present' do
inbox_round_robin_service.clear_queue
expect(inbox_round_robin_service.send(:queue)).to eq([])
inbox_round_robin_service.available_agent
# the service constructed the redis queue before performing
expect(inbox_round_robin_service.send(:queue).map(&:to_i)).to match_array(inbox_members.map(&:user_id))
end
it 'validates the queue and correct it before performing round robin' do
# adding some invalid ids to queue
inbox_round_robin_service.add_agent_to_queue([2, 3, 5, 9])
expect(inbox_round_robin_service.send(:queue).map(&:to_i)).not_to match_array(inbox_members.map(&:user_id))
inbox_round_robin_service.available_agent
# the service have refreshed the redis queue before performing
expect(inbox_round_robin_service.send(:queue).map(&:to_i)).to match_array(inbox_members.map(&:user_id))
end
context 'when allowed_agent_ids is passed' do
it 'will get the first allowed member and move it to the end of the queue' do
expected_queue = [inbox_members[3].user_id, inbox_members[2].user_id, inbox_members[4].user_id, inbox_members[1].user_id,
inbox_members[0].user_id].map(&:to_s)
expect(described_class.new(inbox: inbox).available_agent(
allowed_agent_ids: [
inbox_members[3].user_id,
inbox_members[2].user_id
].map(&:to_s)
)).to eq inbox_members[2].user
expect(described_class.new(inbox: inbox).available_agent(
allowed_agent_ids: [
inbox_members[3].user_id,
inbox_members[2].user_id
].map(&:to_s)
)).to eq inbox_members[3].user
expect(inbox_round_robin_service.send(:queue)).to eq(expected_queue)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# Assigns a conversation to an online agent, picked round robin from the
# supplied list of allowed agent ids.
class AutoAssignment::AgentAssignmentService
  # Allowed agent ids: array
  # This is the list of agents from which an agent can be assigned to this conversation
  # examples: Agents with assignment capacity, Agents who are members of a team etc
  pattr_initialize [:conversation!, :allowed_agent_ids!]

  # Returns the next assignable online agent (or nil) without touching the conversation.
  def find_assignee
    round_robin_manage_service.available_agent(allowed_agent_ids: allowed_online_agent_ids)
  end

  # Assigns the conversation when an eligible online agent is available.
  def perform
    new_assignee = find_assignee
    conversation.update(assignee: new_assignee) if new_assignee
  end

  private

  # Ids (strings, from redis) of account users currently marked 'online';
  # nil when the presence tracker has no entries for the account.
  def online_agent_ids
    online_agents = OnlineStatusTracker.get_available_users(conversation.account_id)
    online_agents.select { |_key, value| value.eql?('online') }.keys if online_agents.present?
  end

  def allowed_online_agent_ids
    # We want to perform roundrobin only over online agents
    # Hence taking an intersection of online agents and allowed member ids
    # the online user ids are string, since its from redis, allowed member ids are integer, since its from active record
    # Defaulting both sides to [] so that a nil (nobody online / no allowed
    # agents) yields an empty intersection instead of a NoMethodError.
    @allowed_online_agent_ids ||= (online_agent_ids || []) & (allowed_agent_ids || []).map(&:to_s)
  end

  def round_robin_manage_service
    @round_robin_manage_service ||= AutoAssignment::InboxRoundRobinService.new(inbox: conversation.inbox)
  end
end
``` | require 'rails_helper'
# Specs for AutoAssignment::AgentAssignmentService: only 'online' agents from
# the allowed list are assigned, in round robin order.
RSpec.describe AutoAssignment::AgentAssignmentService do
let!(:account) { create(:account) }
let!(:inbox) { create(:inbox, account: account, enable_auto_assignment: false) }
let!(:inbox_members) { create_list(:inbox_member, 5, inbox: inbox) }
let!(:conversation) { create(:conversation, inbox: inbox, account: account) }
# Presence fixture: members 0-2 are busy, only members 3 and 4 are assignable.
let!(:online_users) do
{
inbox_members[0].user_id.to_s => 'busy',
inbox_members[1].user_id.to_s => 'busy',
inbox_members[2].user_id.to_s => 'busy',
inbox_members[3].user_id.to_s => 'online',
inbox_members[4].user_id.to_s => 'online'
}
end
before do
inbox_members.each { |inbox_member| create(:account_user, account: account, user: inbox_member.user) }
allow(OnlineStatusTracker).to receive(:get_available_users).and_return(online_users)
end
describe '#perform' do
it 'will assign an online agent to the conversation' do
expect(conversation.reload.assignee).to be_nil
described_class.new(conversation: conversation, allowed_agent_ids: inbox_members.map(&:user_id).map(&:to_s)).perform
expect(conversation.reload.assignee).not_to be_nil
end
end
describe '#find_assignee' do
it 'will return an online agent from the allowed agent ids in roud robin' do
expect(described_class.new(conversation: conversation,
allowed_agent_ids: inbox_members.map(&:user_id).map(&:to_s)).find_assignee).to eq(inbox_members[3].user)
expect(described_class.new(conversation: conversation,
allowed_agent_ids: inbox_members.map(&:user_id).map(&:to_s)).find_assignee).to eq(inbox_members[4].user)
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# Processes @-mentions embedded in a private note: notifies the mentioned
# agents, enqueues the mention job and subscribes them to the conversation.
class Messages::MentionService
  pattr_initialize [:message!]

  def perform
    return unless valid_mention_message?

    validated_mentioned_ids = filter_mentioned_ids_by_inbox
    return if validated_mentioned_ids.blank?

    Conversations::UserMentionJob.perform_later(validated_mentioned_ids, message.conversation.id, message.account.id)
    generate_notifications_for_mentions(validated_mentioned_ids)
    add_mentioned_users_as_participants(validated_mentioned_ids)
  end

  private

  # Mentions are only processed on private notes whose content embeds
  # mention links. (No parameter: operates on the service's own message.)
  def valid_mention_message?
    message.private? && message.content.present? && mentioned_ids.present?
  end

  # Extracts the numeric ids from (mention://user/<id>/<label>) or
  # (mention://team/<id>/<label>) links, deduplicated, as strings.
  def mentioned_ids
    @mentioned_ids ||= message.content.scan(%r{\(mention://(user|team)/(\d+)/(.+?)\)}).map(&:second).uniq
  end

  # Restricts mentions to users with inbox access: account administrators
  # plus inbox members.
  def filter_mentioned_ids_by_inbox
    inbox = message.inbox
    valid_mentionable_ids = inbox.account.administrators.map(&:id) + inbox.members.map(&:id)
    # Intersection of ids
    mentioned_ids & valid_mentionable_ids.uniq.map(&:to_s)
  end

  # One 'conversation_mention' notification per mentioned user.
  def generate_notifications_for_mentions(validated_mentioned_ids)
    validated_mentioned_ids.each do |user_id|
      NotificationBuilder.new(
        notification_type: 'conversation_mention',
        user: User.find(user_id),
        account: message.account,
        primary_actor: message
      ).perform
    end
  end

  # Mentioned users start following the conversation (idempotent).
  def add_mentioned_users_as_participants(validated_mentioned_ids)
    validated_mentioned_ids.each do |user_id|
      message.conversation.conversation_participants.find_or_create_by(user_id: user_id)
    end
  end
end
``` | require 'rails_helper'
# Specs for Messages::MentionService: notification fan-out and participant
# subscription for single and multiple mentions inside private notes.
describe Messages::MentionService do
let!(:account) { create(:account) }
let!(:user) { create(:user, account: account) }
let!(:first_agent) { create(:user, account: account) }
let!(:second_agent) { create(:user, account: account) }
let!(:inbox) { create(:inbox, account: account) }
let!(:conversation) { create(:conversation, account: account, inbox: inbox, assignee: user) }
let(:builder) { double }
before do
# Both agents must be inbox members to be valid mention targets.
create(:inbox_member, user: first_agent, inbox: inbox)
create(:inbox_member, user: second_agent, inbox: inbox)
conversation.reload
allow(NotificationBuilder).to receive(:new).and_return(builder)
allow(builder).to receive(:perform)
end
context 'when message contains mention' do
it 'creates notifications for inbox member who was mentioned' do
message = build(
:message,
conversation: conversation,
account: account,
content: "hi [#{first_agent.name}](mention://user/#{first_agent.id}/#{first_agent.name})",
private: true
)
described_class.new(message: message).perform
expect(NotificationBuilder).to have_received(:new).with(notification_type: 'conversation_mention',
user: first_agent,
account: account,
primary_actor: message)
end
end
context 'when message contains multiple mentions' do
let(:message) do
build(
:message,
conversation: conversation,
account: account,
content: "hey [#{second_agent.name}](mention://user/#{second_agent.id}/#{second_agent.name})/
[#{first_agent.name}](mention://user/#{first_agent.id}/#{first_agent.name}),
please look in to this?",
private: true
)
end
it 'creates notifications for inbox member who was mentioned' do
described_class.new(message: message).perform
expect(NotificationBuilder).to have_received(:new).with(notification_type: 'conversation_mention',
user: second_agent,
account: account,
primary_actor: message)
expect(NotificationBuilder).to have_received(:new).with(notification_type: 'conversation_mention',
user: first_agent,
account: account,
primary_actor: message)
end
it 'add the users to the participants list' do
described_class.new(message: message).perform
expect(conversation.conversation_participants.map(&:user_id)).to contain_exactly(first_agent.id, second_agent.id)
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# Fans out notifications for a newly created message: conversation
# participants first, then the assignee (unless already covered or the sender).
class Messages::NewMessageNotificationService
  pattr_initialize [:message!]

  def perform
    return unless message.notifiable?

    notify_participating_users
    notify_conversation_assignee
  end

  private

  delegate :conversation, :sender, :account, to: :message

  # Notifies every conversation participant except the sender (when the
  # sender is an agent; contact senders are never participants).
  def notify_participating_users
    participating_users = conversation.conversation_participants.map(&:user)
    participating_users -= [sender] if sender.is_a?(User)

    participating_users.uniq.each do |participant|
      NotificationBuilder.new(
        notification_type: 'participating_conversation_new_message',
        user: participant,
        account: account,
        primary_actor: message
      ).perform
    end
  end

  # Notifies the assignee unless they sent the message or were already
  # reached via their participation subscription.
  def notify_conversation_assignee
    return if conversation.assignee.blank?
    return if assignee_already_notified_via_participation?
    return if conversation.assignee == sender

    NotificationBuilder.new(
      notification_type: 'assigned_conversation_new_message',
      user: conversation.assignee,
      account: account,
      primary_actor: message
    ).perform
  end

  # True when the assignee is a participant AND has participation
  # notifications enabled (email or push) for this account — in that case a
  # second, assignee-specific notification would be redundant.
  def assignee_already_notified_via_participation?
    return false unless conversation.conversation_participants.map(&:user).include?(conversation.assignee)

    notification_setting = conversation.assignee.notification_settings.find_by(account_id: account.id)
    notification_setting.email_participating_conversation_new_message? ||
      notification_setting.push_participating_conversation_new_message?
  end
end
``` | require 'rails_helper'
# Specs for Messages::NewMessageNotificationService: participant/assignee
# fan-out, sender exclusion, and deduplication when the assignee is already
# notified through participation.
describe Messages::NewMessageNotificationService do
context 'when message is not notifiable' do
it 'will not create any notifications' do
# Activity messages are system-generated and must not notify anyone.
message = build(:message, message_type: :activity)
expect(NotificationBuilder).not_to receive(:new)
described_class.new(message: message).perform
end
end
context 'when message is notifiable' do
let(:account) { create(:account) }
let(:assignee) { create(:user, account: account) }
let(:participating_agent_1) { create(:user, account: account) }
let(:participating_agent_2) { create(:user, account: account) }
let(:inbox) { create(:inbox, account: account) }
let(:conversation) { create(:conversation, account: account, inbox: inbox, assignee: assignee) }
let(:builder) { double }
before do
create(:inbox_member, inbox: inbox, user: participating_agent_1)
create(:inbox_member, inbox: inbox, user: participating_agent_2)
create(:inbox_member, inbox: inbox, user: assignee)
create(:conversation_participant, conversation: conversation, user: participating_agent_1)
create(:conversation_participant, conversation: conversation, user: participating_agent_2)
create(:conversation_participant, conversation: conversation, user: assignee)
allow(NotificationBuilder).to receive(:new).and_return(builder)
allow(builder).to receive(:perform)
end
context 'when message is created by a participant' do
let(:message) { create(:message, conversation: conversation, account: account, sender: participating_agent_1) }
before do
described_class.new(message: message).perform
end
it 'creates notifications for other participating users' do
expect(NotificationBuilder).to have_received(:new).with(notification_type: 'participating_conversation_new_message',
user: participating_agent_2,
account: account,
primary_actor: message)
end
it 'creates notifications for assignee' do
expect(NotificationBuilder).to have_received(:new).with(notification_type: 'assigned_conversation_new_message',
user: assignee,
account: account,
primary_actor: message)
end
it 'will not create notifications for the user who created the message' do
expect(NotificationBuilder).not_to have_received(:new).with(notification_type: 'participating_conversation_new_message',
user: participating_agent_1,
account: account,
primary_actor: message)
end
end
context 'when message is created by a contact' do
let(:message) { create(:message, conversation: conversation, account: account) }
before do
described_class.new(message: message).perform
end
it 'creates notifications for assignee' do
expect(NotificationBuilder).to have_received(:new).with(notification_type: 'assigned_conversation_new_message',
user: assignee,
account: account,
primary_actor: message)
end
it 'creates notifications for all participating users' do
expect(NotificationBuilder).to have_received(:new).with(notification_type: 'participating_conversation_new_message',
user: participating_agent_1,
account: account,
primary_actor: message)
expect(NotificationBuilder).to have_received(:new).with(notification_type: 'participating_conversation_new_message',
user: participating_agent_2,
account: account,
primary_actor: message)
end
end
context 'with multiple notifications are subscribed' do
let(:message) { create(:message, conversation: conversation, account: account) }
before do
# Assignee subscribes to participation emails, so the participation
# notification alone should cover them.
assignee.notification_settings.find_by(account_id: account.id).update(selected_email_flags: %w[email_assigned_conversation_new_message
email_participating_conversation_new_message])
described_class.new(message: message).perform
end
it 'will not create assignee notifications for the assignee if participating notification was send' do
expect(NotificationBuilder).not_to have_received(:new).with(notification_type: 'assigned_conversation_new_message',
user: assignee,
account: account,
primary_actor: message)
end
end
context 'when message is created by assignee' do
let(:message) { create(:message, conversation: conversation, account: account, sender: assignee) }
before do
described_class.new(message: message).perform
end
it 'will not create notifications for the user who created the message' do
expect(NotificationBuilder).not_to have_received(:new).with(notification_type: 'participating_conversation_new_message',
user: assignee,
account: account,
primary_actor: message)
expect(NotificationBuilder).not_to have_received(:new).with(notification_type: 'assigned_conversation_new_message',
user: assignee,
account: account,
primary_actor: message)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# Maps Bandwidth SMS delivery webhooks onto the corresponding message's
# status (delivered/failed), recording provider error details on failure.
class Sms::DeliveryStatusService
  pattr_initialize [:inbox!, :params!]

  def perform
    return unless supported_status?

    process_status if message.present?
  end

  private

  # Persists the mapped status plus, for failures, the provider error.
  def process_status
    @message.status = status
    @message.external_error = external_error if error_occurred?
    @message.save!
  end

  # Only delivery and failure events are handled; everything else is ignored.
  def supported_status?
    %w[message-delivered message-failed].include?(params[:type])
  end

  # Relevant documentation:
  # https://dev.bandwidth.com/docs/mfa/webhooks/international/message-delivered
  # https://dev.bandwidth.com/docs/mfa/webhooks/international/message-failed
  def status
    type_mapping = {
      'message-delivered' => 'delivered',
      'message-failed' => 'failed'
    }
    type_mapping[params[:type]]
  end

  # "<code> - <description>" for failed deliveries, nil otherwise.
  def external_error
    return nil unless error_occurred?

    error_message = params[:description]
    error_code = params[:errorCode]
    "#{error_code} - #{error_message}"
  end

  def error_occurred?
    params[:errorCode] && params[:type] == 'message-failed'
  end

  # Looks up the message by the provider's id. dig guards against payloads
  # without a :message node (direct indexing would raise NoMethodError).
  def message
    source_id = params.dig(:message, :id)
    return if source_id.blank?

    @message ||= inbox.messages.find_by(source_id: source_id)
  end
end
``` | require 'rails_helper'
# Specs for Sms::DeliveryStatusService: status mapping for delivered/failed
# webhooks, error capture, and no-ops for unsupported types or unknown messages.
describe Sms::DeliveryStatusService do
describe '#perform' do
let!(:account) { create(:account) }
let!(:sms_channel) { create(:channel_sms) }
let!(:contact) { create(:contact, account: account, phone_number: '+12345') }
let(:contact_inbox) { create(:contact_inbox, source_id: '+12345', contact: contact, inbox: sms_channel.inbox) }
let!(:conversation) { create(:conversation, contact: contact, inbox: sms_channel.inbox, contact_inbox: contact_inbox) }
describe '#perform' do
context 'when message delivery status is fired' do
before do
create(:message, account: account, inbox: sms_channel.inbox, conversation: conversation, status: :sent,
source_id: 'SMd560ac79e4a4d36b3ce59f1f50471986')
end
it 'updates the message if the message status is delivered' do
params = {
time: '2022-02-02T23:14:05.309Z',
type: 'message-delivered',
to: sms_channel.phone_number,
description: 'ok',
message: {
'id': conversation.messages.last.source_id
}
}
described_class.new(params: params, inbox: sms_channel.inbox).perform
expect(conversation.reload.messages.last.status).to eq('delivered')
end
it 'updates the message if the message status is failed' do
params = {
time: '2022-02-02T23:14:05.309Z',
type: 'message-failed',
to: sms_channel.phone_number,
description: 'Undeliverable',
errorCode: 995,
message: {
'id': conversation.messages.last.source_id
}
}
described_class.new(params: params, inbox: sms_channel.inbox).perform
expect(conversation.reload.messages.last.status).to eq('failed')
# external_error is "<errorCode> - <description>".
expect(conversation.reload.messages.last.external_error).to eq('995 - Undeliverable')
end
it 'does not update the message if the status is not a support status' do
params = {
time: '2022-02-02T23:14:05.309Z',
type: 'queued',
to: sms_channel.phone_number,
description: 'ok',
message: {
'id': conversation.messages.last.source_id
}
}
described_class.new(params: params, inbox: sms_channel.inbox).perform
expect(conversation.reload.messages.last.status).to eq('sent')
end
it 'does not update the message if the message is not present' do
params = {
time: '2022-02-02T23:14:05.309Z',
type: 'message-delivered',
to: sms_channel.phone_number,
description: 'ok',
message: {
'id': '123'
}
}
described_class.new(params: params, inbox: sms_channel.inbox).perform
expect(conversation.reload.messages.last.status).to eq('sent')
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# Handles an inbound SMS webhook: resolves/creates the contact and
# conversation, records the incoming message, and downloads any media.
class Sms::IncomingMessageService
include ::FileTypeHelper
pattr_initialize [:inbox!, :params!]
def perform
set_contact
set_conversation
# source_id stores the provider's message id so delivery-status webhooks
# can find this message later.
@message = @conversation.messages.create!(
content: params[:text],
account_id: @inbox.account_id,
inbox_id: @inbox.id,
message_type: :incoming,
sender: @contact,
source_id: params[:id]
)
attach_files
# Persists any attachments built (but not yet saved) by attach_files.
@message.save!
end
private
def account
@account ||= @inbox.account
end
def channel
@channel ||= @inbox.channel
end
# Sender's number as supplied by the provider (params[:from]).
def phone_number
params[:from]
end
# Internationally formatted number; used as the default contact name.
def formatted_phone_number
TelephoneNumber.parse(phone_number).international_number
end
# Finds or creates the contact and contact_inbox keyed by the sender number.
def set_contact
contact_inbox = ::ContactInboxWithContactBuilder.new(
source_id: params[:from],
inbox: @inbox,
contact_attributes: contact_attributes
).perform
@contact_inbox = contact_inbox
@contact = contact_inbox.contact
end
def conversation_params
{
account_id: @inbox.account_id,
inbox_id: @inbox.id,
contact_id: @contact.id,
contact_inbox_id: @contact_inbox.id
}
end
# Picks the conversation to append to, creating one when none qualifies.
def set_conversation
# if lock to single conversation is disabled, we will create a new conversation if previous conversation is resolved
@conversation = if @inbox.lock_to_single_conversation
@contact_inbox.conversations.last
else
@contact_inbox.conversations.where
.not(status: :resolved).last
end
return if @conversation
@conversation = ::Conversation.create!(conversation_params)
end
def contact_attributes
{
name: formatted_phone_number,
phone_number: phone_number
}
end
# Downloads each media url (HTTP basic auth against the provider) and
# builds an attachment on @message; caller is responsible for saving.
def attach_files
return if params[:media].blank?
params[:media].each do |media_url|
# we don't need to process this files since chatwoot doesn't support it
next if media_url.end_with?('.smil', '.xml')
attachment_file = Down.download(
media_url,
http_basic_authentication: [channel.provider_config['api_key'], channel.provider_config['api_secret']]
)
@message.attachments.new(
account_id: @message.account_id,
file_type: file_type(attachment_file.content_type),
file: {
io: attachment_file,
filename: attachment_file.original_filename,
content_type: attachment_file.content_type
}
)
end
end
end
``` | require 'rails_helper'
# Specs for Sms::IncomingMessageService: contact/conversation creation,
# conversation reuse under lock_to_single_conversation, and media handling.
describe Sms::IncomingMessageService do
describe '#perform' do
let!(:sms_channel) { create(:channel_sms) }
# Minimal Bandwidth inbound-message webhook payload.
let(:params) do
{
'id': '3232420-2323-234324',
'owner': sms_channel.phone_number,
'applicationId': '2342349-324234d-32432432',
'time': '2022-02-02T23:14:05.262Z',
'segmentCount': 1,
'direction': 'in',
'to': [
sms_channel.phone_number
],
'from': '+14234234234',
'text': 'test message'
}.with_indifferent_access
end
context 'when valid text message params' do
it 'creates appropriate conversations, message and contacts' do
described_class.new(inbox: sms_channel.inbox, params: params).perform
expect(sms_channel.inbox.conversations.count).not_to eq(0)
# Contact name defaults to the internationally formatted number.
expect(Contact.all.first.name).to eq('+1 423-423-4234')
expect(sms_channel.inbox.messages.first.content).to eq(params[:text])
end
it 'appends to last conversation when if conversation already exisits' do
contact_inbox = create(:contact_inbox, inbox: sms_channel.inbox, source_id: params[:from])
2.times.each { create(:conversation, inbox: sms_channel.inbox, contact_inbox: contact_inbox) }
last_conversation = create(:conversation, inbox: sms_channel.inbox, contact_inbox: contact_inbox)
described_class.new(inbox: sms_channel.inbox, params: params).perform
# no new conversation should be created
expect(sms_channel.inbox.conversations.count).to eq(3)
# message appended to the last conversation
expect(last_conversation.messages.last.content).to eq(params[:text])
end
it 'reopen last conversation if last conversation is resolved and lock to single conversation is enabled' do
sms_channel.inbox.update(lock_to_single_conversation: true)
contact_inbox = create(:contact_inbox, inbox: sms_channel.inbox, source_id: params[:from])
last_conversation = create(:conversation, inbox: sms_channel.inbox, contact_inbox: contact_inbox)
last_conversation.update(status: 'resolved')
described_class.new(inbox: sms_channel.inbox, params: params).perform
# no new conversation should be created
expect(sms_channel.inbox.conversations.count).to eq(1)
expect(sms_channel.inbox.conversations.open.last.messages.last.content).to eq(params[:text])
expect(sms_channel.inbox.conversations.open.last.status).to eq('open')
end
it 'creates a new conversation if last conversation is resolved and lock to single conversation is disabled' do
sms_channel.inbox.update(lock_to_single_conversation: false)
contact_inbox = create(:contact_inbox, inbox: sms_channel.inbox, source_id: params[:from])
last_conversation = create(:conversation, inbox: sms_channel.inbox, contact_inbox: contact_inbox)
last_conversation.update(status: 'resolved')
described_class.new(inbox: sms_channel.inbox, params: params).perform
# new conversation should be created
expect(sms_channel.inbox.conversations.count).to eq(2)
# message appended to the last conversation
expect(contact_inbox.conversations.last.messages.last.content).to eq(params[:text])
end
it 'will not create a new conversation if last conversation is not resolved and lock to single conversation is disabled' do
sms_channel.inbox.update(lock_to_single_conversation: false)
contact_inbox = create(:contact_inbox, inbox: sms_channel.inbox, source_id: params[:from])
last_conversation = create(:conversation, inbox: sms_channel.inbox, contact_inbox: contact_inbox)
last_conversation.update(status: Conversation.statuses.except('resolved').keys.sample)
described_class.new(inbox: sms_channel.inbox, params: params).perform
# new conversation should be created
expect(sms_channel.inbox.conversations.count).to eq(1)
# message appended to the last conversation
expect(contact_inbox.conversations.last.messages.last.content).to eq(params[:text])
end
it 'creates attachment messages and ignores .smil files' do
stub_request(:get, 'http://test.com/test.png').to_return(status: 200, body: File.read('spec/assets/sample.png'), headers: {})
stub_request(:get, 'http://test.com/test2.png').to_return(status: 200, body: File.read('spec/assets/sample.png'), headers: {})
media_params = { 'media': [
'http://test.com/test.smil',
'http://test.com/test.png',
'http://test.com/test2.png'
] }.with_indifferent_access
described_class.new(inbox: sms_channel.inbox, params: params.merge(media_params)).perform
expect(sms_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('+1 423-423-4234')
expect(sms_channel.inbox.messages.first.content).to eq('test message')
expect(sms_channel.inbox.messages.first.attachments.present?).to be true
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# Dispatches an outgoing message over the SMS channel and stores the
# provider-assigned message id back on the record.
class Sms::SendOnSmsService < Base::SendOnChannelService
  private

  # Channel type this service is allowed to send on.
  def channel_class
    Channel::Sms
  end

  # Hook invoked by the base class for outgoing replies.
  def perform_reply
    send_on_sms
  end

  # Sends via the provider; records its message id when one is returned so
  # later delivery-status webhooks can locate this message.
  def send_on_sms
    recipient = message.conversation.contact_inbox.source_id
    provider_message_id = channel.send_message(recipient, message)
    return if provider_message_id.blank?

    message.update!(source_id: provider_message_id)
  end
end
``` | require 'rails_helper'
# Specs for Sms::SendOnSmsService: Bandwidth API request shape (with and
# without media) and source_id persistence.
describe Sms::SendOnSmsService do
describe '#perform' do
context 'when a valid message' do
let(:sms_request) { double }
let!(:sms_channel) { create(:channel_sms) }
let!(:contact_inbox) { create(:contact_inbox, inbox: sms_channel.inbox, source_id: '+123456789') }
let!(:conversation) { create(:conversation, contact_inbox: contact_inbox, inbox: sms_channel.inbox) }
it 'calls channel.send_message' do
message = create(:message, message_type: :outgoing, content: 'test',
conversation: conversation)
allow(HTTParty).to receive(:post).and_return(sms_request)
allow(sms_request).to receive(:success?).and_return(true)
allow(sms_request).to receive(:parsed_response).and_return({ 'id' => '123456789' })
expect(HTTParty).to receive(:post).with(
'https://messaging.bandwidth.com/api/v2/users/1/messages',
basic_auth: { username: '1', password: '1' },
headers: { 'Content-Type' => 'application/json' },
body: { 'to' => '+123456789', 'from' => sms_channel.phone_number, 'text' => 'test', 'applicationId' => '1' }.to_json
)
described_class.new(message: message).perform
# Provider id from the response is written back to the message.
expect(message.reload.source_id).to eq('123456789')
end
it 'calls channel.send_message with attachments' do
message = build(:message, message_type: :outgoing, content: 'test',
conversation: conversation)
attachment = message.attachments.new(account_id: message.account_id, file_type: :image)
attachment.file.attach(io: Rails.root.join('spec/assets/avatar.png').open, filename: 'avatar.png', content_type: 'image/png')
attachment2 = message.attachments.new(account_id: message.account_id, file_type: :image)
attachment2.file.attach(io: Rails.root.join('spec/assets/avatar.png').open, filename: 'avatar.png', content_type: 'image/png')
message.save!
allow(HTTParty).to receive(:post).and_return(sms_request)
allow(sms_request).to receive(:success?).and_return(true)
allow(sms_request).to receive(:parsed_response).and_return({ 'id' => '123456789' })
allow(attachment).to receive(:download_url).and_return('url1')
allow(attachment2).to receive(:download_url).and_return('url2')
expect(HTTParty).to receive(:post).with(
'https://messaging.bandwidth.com/api/v2/users/1/messages',
basic_auth: { username: '1', password: '1' },
headers: { 'Content-Type' => 'application/json' },
body: { 'to' => '+123456789', 'from' => sms_channel.phone_number, 'text' => 'test', 'applicationId' => '1',
'media' => %w[url1 url2] }.to_json
)
described_class.new(message: message).perform
expect(message.reload.source_id).to eq('123456789')
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# Executes a one-off SMS campaign: texts the campaign message to every
# contact tagged with any of the campaign's audience labels.
class Sms::OneoffSmsCampaignService
  pattr_initialize [:campaign!]

  def perform
    raise "Invalid campaign #{campaign.id}" unless campaign.inbox.inbox_type == 'Sms' && campaign.one_off?
    raise 'Completed Campaign' if campaign.completed?

    # marks campaign completed so that other jobs won't pick it up
    campaign.completed!

    label_ids = campaign.audience.select { |audience| audience['type'] == 'Label' }.pluck('id')
    label_titles = campaign.account.labels.where(id: label_ids).pluck(:title)
    process_audience(label_titles)
  end

  private

  delegate :inbox, to: :campaign
  delegate :channel, to: :inbox

  # Texts each contact tagged with any audience label, skipping contacts
  # without a phone number on record.
  def process_audience(audience_labels)
    campaign.account.contacts.tagged_with(audience_labels, any: true).each do |contact|
      phone = contact.phone_number
      send_message(to: phone, content: campaign.message) if phone.present?
    end
  end

  def send_message(to:, content:)
    channel.send_text_message(to, content)
  end
end
``` | require 'rails_helper'
# Specs for Sms::OneoffSmsCampaignService: guard clauses for completed or
# non-SMS campaigns, and one message per labelled contact.
describe Sms::OneoffSmsCampaignService do
subject(:sms_campaign_service) { described_class.new(campaign: campaign) }
let(:account) { create(:account) }
let!(:sms_channel) { create(:channel_sms) }
let!(:sms_inbox) { create(:inbox, channel: sms_channel) }
let(:label1) { create(:label, account: account) }
let(:label2) { create(:label, account: account) }
let!(:campaign) do
create(:campaign, inbox: sms_inbox, account: account,
audience: [{ type: 'Label', id: label1.id }, { type: 'Label', id: label2.id }])
end
describe 'perform' do
before do
stub_request(:post, 'https://messaging.bandwidth.com/api/v2/users/1/messages').to_return(
status: 200,
body: { 'id' => '1' }.to_json,
headers: {}
)
end
it 'raises error if the campaign is completed' do
campaign.completed!
expect { sms_campaign_service.perform }.to raise_error 'Completed Campaign'
end
it 'raises error invalid campaign when its not a oneoff sms campaign' do
campaign = create(:campaign)
expect { described_class.new(campaign: campaign).perform }.to raise_error "Invalid campaign #{campaign.id}"
end
it 'send messages to contacts in the audience and marks the campaign completed' do
# Contact tagged with both labels must still receive only one message.
contact_with_label1, contact_with_label2, contact_with_both_labels = FactoryBot.create_list(:contact, 3, :with_phone_number, account: account)
contact_with_label1.update_labels([label1.title])
contact_with_label2.update_labels([label2.title])
contact_with_both_labels.update_labels([label1.title, label2.title])
sms_campaign_service.perform
assert_requested(:post, 'https://messaging.bandwidth.com/api/v2/users/1/messages', times: 3)
expect(campaign.reload.completed?).to be true
end
end
end
|
Write RSpec test file for following ruby class
```ruby
class Notification::PushNotificationService
include Rails.application.routes.url_helpers
pattr_initialize [:notification!]
def perform
return unless user_subscribed_to_notification?
notification_subscriptions.each do |subscription|
send_browser_push(subscription)
send_fcm_push(subscription)
send_push_via_chatwoot_hub(subscription)
end
end
private
delegate :user, to: :notification
delegate :notification_subscriptions, to: :user
delegate :notification_settings, to: :user
def user_subscribed_to_notification?
notification_setting = notification_settings.find_by(account_id: notification.account.id)
return true if notification_setting.public_send("push_#{notification.notification_type}?")
false
end
def conversation
@conversation ||= notification.conversation
end
def push_message
{
title: notification.push_message_title,
tag: "#{notification.notification_type}_#{conversation.display_id}_#{notification.id}",
url: push_url
}
end
def push_url
app_account_conversation_url(account_id: conversation.account_id, id: conversation.display_id)
end
def send_browser_push?(subscription)
VapidService.public_key && subscription.browser_push?
end
def send_browser_push(subscription)
return unless send_browser_push?(subscription)
WebPush.payload_send(
message: JSON.generate(push_message),
endpoint: subscription.subscription_attributes['endpoint'],
p256dh: subscription.subscription_attributes['p256dh'],
auth: subscription.subscription_attributes['auth'],
vapid: {
subject: push_url,
public_key: VapidService.public_key,
private_key: VapidService.private_key
},
ssl_timeout: 5,
open_timeout: 5,
read_timeout: 5
)
rescue WebPush::ExpiredSubscription
subscription.destroy!
rescue Errno::ECONNRESET, Net::OpenTimeout, Net::ReadTimeout => e
Rails.logger.error "WebPush operation error: #{e.message}"
end
def send_fcm_push(subscription)
return unless ENV['FCM_SERVER_KEY']
return unless subscription.fcm?
fcm = FCM.new(ENV.fetch('FCM_SERVER_KEY', nil))
response = fcm.send([subscription.subscription_attributes['push_token']], fcm_options)
remove_subscription_if_error(subscription, response)
end
def send_push_via_chatwoot_hub(subscription)
return if ENV['FCM_SERVER_KEY']
return unless ActiveModel::Type::Boolean.new.cast(ENV.fetch('ENABLE_PUSH_RELAY_SERVER', true))
return unless subscription.fcm?
ChatwootHub.send_browser_push([subscription.subscription_attributes['push_token']], fcm_options)
end
def remove_subscription_if_error(subscription, response)
subscription.destroy! if JSON.parse(response[:body])['results']&.first&.keys&.include?('error')
end
def fcm_options
{
notification: {
title: notification.notification_type.titleize,
body: notification.push_message_title,
sound: 'default'
},
android: { priority: 'high' },
data: { notification: notification.fcm_push_data.to_json },
collapse_key: "chatwoot_#{notification.primary_actor_type.downcase}_#{notification.primary_actor_id}"
}
end
end
``` | require 'rails_helper'
describe Notification::PushNotificationService do
let!(:account) { create(:account) }
let!(:user) { create(:user, account: account) }
let!(:notification) { create(:notification, user: user, account: user.accounts.first) }
let(:fcm_double) { double }
before do
allow(WebPush).to receive(:payload_send).and_return(true)
allow(FCM).to receive(:new).and_return(fcm_double)
allow(fcm_double).to receive(:send).and_return({ body: { 'results': [] }.to_json })
end
describe '#perform' do
it 'sends webpush notifications for webpush subscription' do
with_modified_env VAPID_PUBLIC_KEY: 'test' do
create(:notification_subscription, user: notification.user)
described_class.new(notification: notification).perform
expect(WebPush).to have_received(:payload_send)
expect(FCM).not_to have_received(:new)
end
end
it 'sends a fcm notification for firebase subscription' do
with_modified_env FCM_SERVER_KEY: 'test', ENABLE_PUSH_RELAY_SERVER: 'false' do
create(:notification_subscription, user: notification.user, subscription_type: 'fcm')
described_class.new(notification: notification).perform
expect(FCM).to have_received(:new)
expect(WebPush).not_to have_received(:payload_send)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# refer: https://gitlab.com/gitlab-org/ruby/gems/gitlab-mail_room/-/blob/master/lib/mail_room/microsoft_graph/connection.rb
# refer: https://github.com/microsoftgraph/msgraph-sample-rubyrailsapp/tree/b4a6869fe4a438cde42b161196484a929f1bee46
# https://learn.microsoft.com/en-us/azure/active-directory/develop/active-directory-configurable-token-lifetimes
class Microsoft::RefreshOauthTokenService
pattr_initialize [:channel!]
# if the token is not expired yet then skip the refresh token step
def access_token
provider_config = channel.provider_config.with_indifferent_access
if Time.current.utc >= expires_on(provider_config['expires_on'])
# Token expired, refresh
new_hash = refresh_tokens
new_hash[:access_token]
else
provider_config[:access_token]
end
end
def expires_on(expiry)
# we will give it a 5 minute gap for safety
expiry.presence ? DateTime.parse(expiry) - 5.minutes : Time.current.utc
end
# <RefreshTokensSnippet>
def refresh_tokens
token_hash = channel.provider_config.with_indifferent_access
oauth_strategy = ::MicrosoftGraphAuth.new(
nil, ENV.fetch('AZURE_APP_ID', nil), ENV.fetch('AZURE_APP_SECRET', nil)
)
token_service = OAuth2::AccessToken.new(
oauth_strategy.client, token_hash['access_token'],
refresh_token: token_hash['refresh_token']
)
# Refresh the tokens
new_tokens = token_service.refresh!.to_hash.slice(:access_token, :refresh_token, :expires_at)
update_channel_provider_config(new_tokens)
channel.provider_config
end
# </RefreshTokensSnippet>
def update_channel_provider_config(new_tokens)
new_tokens = new_tokens.with_indifferent_access
channel.provider_config = {
access_token: new_tokens[:access_token],
refresh_token: new_tokens[:refresh_token],
expires_on: Time.at(new_tokens[:expires_at]).utc.to_s
}
channel.save!
end
end
``` | require 'rails_helper'
RSpec.describe Microsoft::RefreshOauthTokenService do
let(:access_token) { SecureRandom.hex }
let(:refresh_token) { SecureRandom.hex }
let(:expires_on) { Time.zone.now + 3600 }
let!(:microsoft_email_channel) do
create(:channel_email, provider_config: { access_token: access_token, refresh_token: refresh_token, expires_on: expires_on })
end
let(:new_tokens) { { access_token: access_token, refresh_token: refresh_token, expires_at: expires_on.to_i, token_type: 'bearer' } }
describe '#access_token' do
context 'when token is not expired' do
it 'returns the existing access token' do
expect(described_class.new(channel: microsoft_email_channel).access_token).to eq(access_token)
expect(microsoft_email_channel.reload.provider_config['refresh_token']).to eq(refresh_token)
end
end
context 'when token is expired' do
let(:expires_on) { 1.minute.from_now }
before do
stub_request(:post, 'https://login.microsoftonline.com/common/oauth2/v2.0/token').with(
body: { 'grant_type' => 'refresh_token', 'refresh_token' => refresh_token }
).to_return(status: 200, body: new_tokens.to_json, headers: { 'Content-Type' => 'application/json' })
end
it 'fetches new access token and refresh tokens' do
microsoft_email_channel.provider_config['expires_on'] = Time.zone.now - 3600
microsoft_email_channel.save!
expect(described_class.new(channel: microsoft_email_channel).access_token).not_to eq(access_token)
expect(microsoft_email_channel.reload.provider_config['access_token']).to eq(new_tokens[:access_token])
expect(microsoft_email_channel.reload.provider_config['refresh_token']).to eq(new_tokens[:refresh_token])
expect(microsoft_email_channel.reload.provider_config['expires_on']).to eq(Time.at(new_tokens[:expires_at]).utc.to_s)
end
end
context 'when refresh token is not present in provider config and access token is expired' do
it 'throws an error' do
microsoft_email_channel.update(provider_config: {
access_token: access_token,
expires_on: expires_on - 3600
})
expect do
described_class.new(channel: microsoft_email_channel).access_token
end.to raise_error(RuntimeError, 'A refresh_token is not available')
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
class Contacts::ContactableInboxesService
pattr_initialize [:contact!]
def get
account = contact.account
account.inboxes.filter_map { |inbox| get_contactable_inbox(inbox) }
end
private
def get_contactable_inbox(inbox)
case inbox.channel_type
when 'Channel::TwilioSms'
twilio_contactable_inbox(inbox)
when 'Channel::Whatsapp'
whatsapp_contactable_inbox(inbox)
when 'Channel::Sms'
sms_contactable_inbox(inbox)
when 'Channel::Email'
email_contactable_inbox(inbox)
when 'Channel::Api'
api_contactable_inbox(inbox)
when 'Channel::WebWidget'
website_contactable_inbox(inbox)
end
end
def website_contactable_inbox(inbox)
latest_contact_inbox = inbox.contact_inboxes.where(contact: @contact).last
return unless latest_contact_inbox
# FIXME : change this when multiple conversations comes in
return if latest_contact_inbox.conversations.present?
{ source_id: latest_contact_inbox.source_id, inbox: inbox }
end
def api_contactable_inbox(inbox)
latest_contact_inbox = inbox.contact_inboxes.where(contact: @contact).last
source_id = latest_contact_inbox&.source_id || SecureRandom.uuid
{ source_id: source_id, inbox: inbox }
end
def email_contactable_inbox(inbox)
return unless @contact.email
{ source_id: @contact.email, inbox: inbox }
end
def whatsapp_contactable_inbox(inbox)
return unless @contact.phone_number
# Remove the plus since thats the format 360 dialog uses
{ source_id: @contact.phone_number.delete('+'), inbox: inbox }
end
def sms_contactable_inbox(inbox)
return unless @contact.phone_number
{ source_id: @contact.phone_number, inbox: inbox }
end
def twilio_contactable_inbox(inbox)
return if @contact.phone_number.blank?
case inbox.channel.medium
when 'sms'
{ source_id: @contact.phone_number, inbox: inbox }
when 'whatsapp'
{ source_id: "whatsapp:#{@contact.phone_number}", inbox: inbox }
end
end
end
``` | require 'rails_helper'
describe Contacts::ContactableInboxesService do
before do
stub_request(:post, /graph.facebook.com/)
end
let(:account) { create(:account) }
let(:contact) { create(:contact, account: account, email: '[email protected]', phone_number: '+2320000') }
let!(:twilio_sms) { create(:channel_twilio_sms, account: account) }
let!(:twilio_sms_inbox) { create(:inbox, channel: twilio_sms, account: account) }
let!(:twilio_whatsapp) { create(:channel_twilio_sms, medium: :whatsapp, account: account) }
let!(:twilio_whatsapp_inbox) { create(:inbox, channel: twilio_whatsapp, account: account) }
let!(:email_channel) { create(:channel_email, account: account) }
let!(:email_inbox) { create(:inbox, channel: email_channel, account: account) }
let!(:api_channel) { create(:channel_api, account: account) }
let!(:api_inbox) { create(:inbox, channel: api_channel, account: account) }
let!(:website_inbox) { create(:inbox, channel: create(:channel_widget, account: account), account: account) }
let!(:sms_inbox) { create(:inbox, channel: create(:channel_sms, account: account), account: account) }
describe '#get' do
it 'returns the contactable inboxes for the contact' do
contactable_inboxes = described_class.new(contact: contact).get
expect(contactable_inboxes).to include({ source_id: contact.phone_number, inbox: twilio_sms_inbox })
expect(contactable_inboxes).to include({ source_id: "whatsapp:#{contact.phone_number}", inbox: twilio_whatsapp_inbox })
expect(contactable_inboxes).to include({ source_id: contact.email, inbox: email_inbox })
expect(contactable_inboxes).to include({ source_id: contact.phone_number, inbox: sms_inbox })
end
it 'doest not return the non contactable inboxes for the contact' do
facebook_channel = create(:channel_facebook_page, account: account)
facebook_inbox = create(:inbox, channel: facebook_channel, account: account)
twitter_channel = create(:channel_twitter_profile, account: account)
twitter_inbox = create(:inbox, channel: twitter_channel, account: account)
contactable_inboxes = described_class.new(contact: contact).get
expect(contactable_inboxes.pluck(:inbox)).not_to include(website_inbox)
expect(contactable_inboxes.pluck(:inbox)).not_to include(facebook_inbox)
expect(contactable_inboxes.pluck(:inbox)).not_to include(twitter_inbox)
end
context 'when api inbox is available' do
it 'returns existing source id if contact inbox exists' do
contact_inbox = create(:contact_inbox, inbox: api_inbox, contact: contact)
contactable_inboxes = described_class.new(contact: contact).get
expect(contactable_inboxes).to include({ source_id: contact_inbox.source_id, inbox: api_inbox })
end
end
context 'when website inbox is available' do
it 'returns existing source id if contact inbox exists without any conversations' do
contact_inbox = create(:contact_inbox, inbox: website_inbox, contact: contact)
contactable_inboxes = described_class.new(contact: contact).get
expect(contactable_inboxes).to include({ source_id: contact_inbox.source_id, inbox: website_inbox })
end
it 'does not return existing source id if contact inbox exists with conversations' do
contact_inbox = create(:contact_inbox, inbox: website_inbox, contact: contact)
create(:conversation, contact: contact, inbox: website_inbox, contact_inbox: contact_inbox)
contactable_inboxes = described_class.new(contact: contact).get
expect(contactable_inboxes.pluck(:inbox)).not_to include(website_inbox)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
class Contacts::FilterService < FilterService
ATTRIBUTE_MODEL = 'contact_attribute'.freeze
def perform
@contacts = contact_query_builder
{
contacts: @contacts,
count: @contacts.count
}
end
def contact_query_builder
contact_filters = @filters['contacts']
@params[:payload].each_with_index do |query_hash, current_index|
current_filter = contact_filters[query_hash['attribute_key']]
@query_string += contact_query_string(current_filter, query_hash, current_index)
end
base_relation.where(@query_string, @filter_values.with_indifferent_access)
end
def contact_query_string(current_filter, query_hash, current_index)
attribute_key = query_hash[:attribute_key]
query_operator = query_hash[:query_operator]
filter_operator_value = filter_operation(query_hash, current_index)
return custom_attribute_query(query_hash, 'contact_attribute', current_index) if current_filter.nil?
case current_filter['attribute_type']
when 'additional_attributes'
" LOWER(contacts.additional_attributes ->> '#{attribute_key}') #{filter_operator_value} #{query_operator} "
when 'date_attributes'
" (contacts.#{attribute_key})::#{current_filter['data_type']} #{filter_operator_value}#{current_filter['data_type']} #{query_operator} "
when 'standard'
if attribute_key == 'labels'
" #{tag_filter_query('Contact', 'contacts', query_hash, current_index)} "
else
" LOWER(contacts.#{attribute_key}) #{filter_operator_value} #{query_operator} "
end
end
end
def filter_values(query_hash)
current_val = query_hash['values'][0]
if query_hash['attribute_key'] == 'phone_number'
"+#{current_val}"
elsif query_hash['attribute_key'] == 'country_code'
current_val.downcase
else
current_val.is_a?(String) ? current_val.downcase : current_val
end
end
def base_relation
Current.account.contacts
end
private
def equals_to_filter_string(filter_operator, current_index)
return "= :value_#{current_index}" if filter_operator == 'equal_to'
"!= :value_#{current_index}"
end
end
``` | require 'rails_helper'
describe Contacts::FilterService do
subject(:filter_service) { described_class }
let!(:account) { create(:account) }
let!(:first_user) { create(:user, account: account) }
let!(:second_user) { create(:user, account: account) }
let!(:inbox) { create(:inbox, account: account, enable_auto_assignment: false) }
let(:en_contact) { create(:contact, account: account, additional_attributes: { 'browser_language': 'en' }) }
let(:el_contact) { create(:contact, account: account, additional_attributes: { 'browser_language': 'el' }) }
let(:cs_contact) { create(:contact, account: account, additional_attributes: { 'browser_language': 'cs' }) }
before do
create(:inbox_member, user: first_user, inbox: inbox)
create(:inbox_member, user: second_user, inbox: inbox)
create(:conversation, account: account, inbox: inbox, assignee: first_user, contact: en_contact)
create(:conversation, account: account, inbox: inbox, contact: el_contact)
Current.account = account
create(:custom_attribute_definition,
attribute_key: 'contact_additional_information',
account: account,
attribute_model: 'contact_attribute',
attribute_display_type: 'text')
create(:custom_attribute_definition,
attribute_key: 'customer_type',
account: account,
attribute_model: 'contact_attribute',
attribute_display_type: 'list',
attribute_values: %w[regular platinum gold])
create(:custom_attribute_definition,
attribute_key: 'signed_in_at',
account: account,
attribute_model: 'contact_attribute',
attribute_display_type: 'date')
end
describe '#perform' do
before do
en_contact.update_labels(%w[random_label support])
cs_contact.update_labels('support')
en_contact.update!(custom_attributes: { contact_additional_information: 'test custom data' })
el_contact.update!(custom_attributes: { contact_additional_information: 'test custom data', customer_type: 'platinum' })
cs_contact.update!(custom_attributes: { customer_type: 'platinum', signed_in_at: '2022-01-19' })
end
context 'with query present' do
let!(:params) { { payload: [], page: 1 } }
let(:payload) do
[
{
attribute_key: 'browser_language',
filter_operator: 'equal_to',
values: ['en'],
query_operator: nil
}.with_indifferent_access
]
end
context 'with label filter' do
it 'returns equal_to filter results properly' do
params[:payload] = [
{
attribute_key: 'labels',
filter_operator: 'equal_to',
values: ['support'],
query_operator: nil
}.with_indifferent_access
]
result = filter_service.new(params, first_user).perform
expect(result[:contacts].length).to be 2
expect(result[:contacts].first.label_list).to include('support')
expect(result[:contacts].last.label_list).to include('support')
end
it 'returns not_equal_to filter results properly' do
params[:payload] = [
{
attribute_key: 'labels',
filter_operator: 'not_equal_to',
values: ['support'],
query_operator: nil
}.with_indifferent_access
]
result = filter_service.new(params, first_user).perform
expect(result[:contacts].length).to be 1
expect(result[:contacts].first.id).to eq el_contact.id
end
it 'returns is_present filter results properly' do
params[:payload] = [
{
attribute_key: 'labels',
filter_operator: 'is_present',
values: [],
query_operator: nil
}.with_indifferent_access
]
result = filter_service.new(params, first_user).perform
expect(result[:contacts].length).to be 2
expect(result[:contacts].first.label_list).to include('support')
expect(result[:contacts].last.label_list).to include('support')
end
it 'returns is_not_present filter results properly' do
params[:payload] = [
{
attribute_key: 'labels',
filter_operator: 'is_not_present',
values: [],
query_operator: nil
}.with_indifferent_access
]
result = filter_service.new(params, first_user).perform
expect(result[:contacts].length).to be 1
expect(result[:contacts].first.id).to eq el_contact.id
end
end
it 'filter contacts by additional_attributes' do
params[:payload] = payload
result = filter_service.new(params, first_user).perform
expect(result[:count]).to be 1
expect(result[:contacts].first.id).to eq(en_contact.id)
end
it 'filter contacts by name' do
params[:payload] = [
{
attribute_key: 'name',
filter_operator: 'equal_to',
values: [en_contact.name],
query_operator: nil
}.with_indifferent_access
]
result = filter_service.new(params, first_user).perform
expect(result[:count]).to be 1
expect(result[:contacts].length).to be 1
expect(result[:contacts].first.name).to eq(en_contact.name)
end
it 'filter by custom_attributes and labels' do
params[:payload] = [
{
attribute_key: 'customer_type',
filter_operator: 'equal_to',
values: ['platinum'],
query_operator: 'AND'
}.with_indifferent_access,
{
attribute_key: 'labels',
filter_operator: 'equal_to',
values: ['support'],
query_operator: 'AND'
}.with_indifferent_access,
{
attribute_key: 'signed_in_at',
filter_operator: 'is_less_than',
values: ['2022-01-20'],
query_operator: nil
}.with_indifferent_access
]
result = filter_service.new(params, first_user).perform
expect(result[:contacts].length).to be 1
expect(result[:contacts].first.id).to eq(cs_contact.id)
end
it 'filter by custom_attributes and additional_attributes' do
params[:payload] = [
{
attribute_key: 'customer_type',
filter_operator: 'equal_to',
values: ['platinum'],
query_operator: 'AND'
}.with_indifferent_access,
{
attribute_key: 'browser_language',
filter_operator: 'equal_to',
values: ['el'],
query_operator: 'AND'
}.with_indifferent_access,
{
attribute_key: 'contact_additional_information',
filter_operator: 'equal_to',
values: ['test custom data'],
query_operator: nil
}.with_indifferent_access
]
result = filter_service.new(params, first_user).perform
expect(result[:contacts].length).to be 1
expect(result[:contacts].first.id).to eq(el_contact.id)
end
it 'filter by created_at and custom_attributes' do
tomorrow = Date.tomorrow.strftime
params[:payload] = [
{
attribute_key: 'customer_type',
filter_operator: 'equal_to',
values: ['platinum'],
query_operator: 'AND'
}.with_indifferent_access,
{
attribute_key: 'created_at',
filter_operator: 'is_less_than',
values: [tomorrow.to_s],
query_operator: nil
}.with_indifferent_access
]
result = filter_service.new(params, first_user).perform
expected_count = Contact.where("created_at < ? AND custom_attributes->>'customer_type' = ?", Date.tomorrow, 'platinum').count
expect(result[:contacts].length).to be expected_count
expect(result[:contacts].pluck(:id)).to include(el_contact.id)
end
context 'with x_days_before filter' do
before do
Time.zone = 'UTC'
el_contact.update(last_activity_at: (Time.zone.today - 4.days))
cs_contact.update(last_activity_at: (Time.zone.today - 5.days))
en_contact.update(last_activity_at: (Time.zone.today - 2.days))
end
it 'filter by last_activity_at 3_days_before and custom_attributes' do
params[:payload] = [
{
attribute_key: 'last_activity_at',
filter_operator: 'days_before',
values: [3],
query_operator: 'AND'
}.with_indifferent_access,
{
attribute_key: 'contact_additional_information',
filter_operator: 'equal_to',
values: ['test custom data'],
query_operator: nil
}.with_indifferent_access
]
expected_count = Contact.where(
"last_activity_at < ? AND
custom_attributes->>'contact_additional_information' = ?",
(Time.zone.today - 3.days),
'test custom data'
).count
result = filter_service.new(params, first_user).perform
expect(result[:contacts].length).to be expected_count
expect(result[:contacts].first.id).to eq(el_contact.id)
end
it 'filter by last_activity_at 2_days_before and custom_attributes' do
params[:payload] = [
{
attribute_key: 'last_activity_at',
filter_operator: 'days_before',
values: [2],
query_operator: nil
}.with_indifferent_access
]
expected_count = Contact.where('last_activity_at < ?', (Time.zone.today - 2.days)).count
result = filter_service.new(params, first_user).perform
expect(result[:contacts].length).to be expected_count
expect(result[:contacts].pluck(:id)).to include(el_contact.id)
expect(result[:contacts].pluck(:id)).to include(cs_contact.id)
expect(result[:contacts].pluck(:id)).not_to include(en_contact.id)
end
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
class Labels::UpdateService
pattr_initialize [:new_label_title!, :old_label_title!, :account_id!]
def perform
tagged_conversations.find_in_batches do |conversation_batch|
conversation_batch.each do |conversation|
conversation.label_list.remove(old_label_title)
conversation.label_list.add(new_label_title)
conversation.save!
end
end
tagged_contacts.find_in_batches do |contact_batch|
contact_batch.each do |contact|
contact.label_list.remove(old_label_title)
contact.label_list.add(new_label_title)
contact.save!
end
end
end
private
def tagged_conversations
account.conversations.tagged_with(old_label_title)
end
def tagged_contacts
account.contacts.tagged_with(old_label_title)
end
def account
@account ||= Account.find(account_id)
end
end
``` | require 'rails_helper'
describe Labels::UpdateService do
let(:account) { create(:account) }
let(:conversation) { create(:conversation, account: account) }
let(:label) { create(:label, account: account) }
let(:contact) { conversation.contact }
before do
conversation.label_list.add(label.title)
conversation.save!
contact.label_list.add(label.title)
contact.save!
end
describe '#perform' do
it 'updates associated conversations/contacts labels' do
expect(conversation.label_list).to eq([label.title])
expect(contact.label_list).to eq([label.title])
described_class.new(
new_label_title: 'updated-label-title',
old_label_title: label.title,
account_id: account.id
).perform
expect(conversation.reload.label_list).to eq(['updated-label-title'])
expect(contact.reload.label_list).to eq(['updated-label-title'])
end
end
end
|
Write RSpec test file for following ruby class
```ruby
class Internal::RemoveStaleContactInboxesService
def perform
return unless remove_stale_contact_inbox_job_enabled?
time_period = 90.days.ago
contact_inboxes_to_delete = stale_contact_inboxes(time_period)
log_stale_contact_inboxes_deletion(contact_inboxes_to_delete, time_period)
# Since the number of records to delete is very high,
# delete_all would be faster than destroy_all since it operates at database level
# and avoid loading all the records in memory
# Transaction and batching is used to avoid deadlock and memory issues
ContactInbox.transaction do
contact_inboxes_to_delete
.find_in_batches(batch_size: 10_000) do |group|
ContactInbox.where(id: group.map(&:id)).delete_all
end
end
end
private
def remove_stale_contact_inbox_job_enabled?
job_status = ENV.fetch('REMOVE_STALE_CONTACT_INBOX_JOB_STATUS', false)
return false unless ActiveModel::Type::Boolean.new.cast(job_status)
true
end
def stale_contact_inboxes(time_period)
ContactInbox.stale_without_conversations(time_period)
end
def log_stale_contact_inboxes_deletion(contact_inboxes, time_period)
count = contact_inboxes.count
Rails.logger.info "Deleting #{count} stale contact inboxes older than #{time_period}"
# Log the SQL query without executing it
sql_query = contact_inboxes.to_sql
Rails.logger.info("SQL Query: #{sql_query}")
end
end
``` | # spec/services/remove_stale_contact_inboxes_service_spec.rb
require 'rails_helper'
RSpec.describe Internal::RemoveStaleContactInboxesService do
describe '#perform' do
it 'does not delete stale contact inboxes if REMOVE_STALE_CONTACT_INBOX_JOB_STATUS is false' do
# default value of REMOVE_STALE_CONTACT_INBOX_JOB_STATUS is false
create(:contact_inbox, created_at: 3.days.ago)
create(:contact_inbox, created_at: 91.days.ago)
create(:contact_inbox, created_at: 92.days.ago)
create(:contact_inbox, created_at: 93.days.ago)
create(:contact_inbox, created_at: 94.days.ago)
service = described_class.new
expect { service.perform }.not_to change(ContactInbox, :count)
end
it 'deletes stale contact inboxes' do
with_modified_env REMOVE_STALE_CONTACT_INBOX_JOB_STATUS: 'true' do
create(:contact_inbox, created_at: 3.days.ago)
create(:contact_inbox, created_at: 91.days.ago)
create(:contact_inbox, created_at: 92.days.ago)
create(:contact_inbox, created_at: 93.days.ago)
create(:contact_inbox, created_at: 94.days.ago)
service = described_class.new
expect { service.perform }.to change(ContactInbox, :count).by(-4)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
class AgentBots::ValidateBotService
pattr_initialize [:agent_bot]
def perform
return true unless agent_bot.bot_type == 'csml'
validate_csml_bot
end
private
def csml_client
@csml_client ||= CsmlEngine.new
end
def csml_bot_payload
{
id: agent_bot[:name],
name: agent_bot[:name],
default_flow: 'Default',
flows: [
{
id: SecureRandom.uuid,
name: 'Default',
content: agent_bot.bot_config['csml_content'],
commands: []
}
]
}
end
def validate_csml_bot
response = csml_client.validate(csml_bot_payload)
response.blank? || response['valid']
rescue StandardError => e
ChatwootExceptionTracker.new(e, account: agent_bot&.account).capture_exception
false
end
end
``` | require 'rails_helper'
describe AgentBots::ValidateBotService do
describe '#perform' do
it 'returns true if bot_type is not csml' do
agent_bot = create(:agent_bot)
valid = described_class.new(agent_bot: agent_bot).perform
expect(valid).to be true
end
it 'returns true if validate csml returns true' do
agent_bot = create(:agent_bot, :skip_validate, bot_type: 'csml', bot_config: {})
csml_client = double
csml_response = double
allow(CsmlEngine).to receive(:new).and_return(csml_client)
allow(csml_client).to receive(:validate).and_return(csml_response)
allow(csml_response).to receive(:blank?).and_return(false)
allow(csml_response).to receive(:[]).with('valid').and_return(true)
valid = described_class.new(agent_bot: agent_bot).perform
expect(valid).to be true
expect(CsmlEngine).to have_received(:new)
end
end
end
|
Write RSpec test file for following ruby class
```ruby
class Macros::ExecutionService < ActionService
def initialize(macro, conversation, user)
super(conversation)
@macro = macro
@account = macro.account
@user = user
Current.user = user
end
def perform
@macro.actions.each do |action|
action = action.with_indifferent_access
begin
send(action[:action_name], action[:action_params])
rescue StandardError => e
ChatwootExceptionTracker.new(e, account: @account).capture_exception
end
end
ensure
Current.reset
end
private
def assign_agent(agent_ids)
agent_ids = agent_ids.map { |id| id == 'self' ? @user.id : id }
super(agent_ids)
end
def add_private_note(message)
return if conversation_a_tweet?
params = { content: message[0], private: true }
# Added reload here to ensure conversation us persistent with the latest updates
mb = Messages::MessageBuilder.new(@user, @conversation.reload, params)
mb.perform
end
def send_message(message)
return if conversation_a_tweet?
params = { content: message[0], private: false }
# Added reload here to ensure conversation us persistent with the latest updates
mb = Messages::MessageBuilder.new(@user, @conversation.reload, params)
mb.perform
end
def send_attachment(blob_ids)
return if conversation_a_tweet?
return unless @macro.files.attached?
blobs = ActiveStorage::Blob.where(id: blob_ids)
return if blobs.blank?
params = { content: nil, private: false, attachments: blobs }
# Added reload here to ensure conversation us persistent with the latest updates
mb = Messages::MessageBuilder.new(@user, @conversation.reload, params)
mb.perform
end
end
``` | require 'rails_helper'
RSpec.describe Macros::ExecutionService, type: :service do
let(:account) { create(:account) }
let(:conversation) { create(:conversation, account: account) }
let(:user) { create(:user, account: account) }
let(:macro) { create(:macro, account: account) }
let(:service) { described_class.new(macro, conversation, user) }
before do
create(:inbox_member, user: user, inbox: conversation.inbox)
end
describe '#perform' do
context 'when actions are present' do
before do
allow(macro).to receive(:actions).and_return([
{ action_name: 'assign_agent', action_params: ['self'] },
{ action_name: 'add_private_note', action_params: ['Test note'] },
{ action_name: 'send_message', action_params: ['Test message'] },
{ action_name: 'send_attachment', action_params: [1, 2] }
])
end
it 'executes the actions' do
expect(service).to receive(:assign_agent).with(['self']).and_call_original
expect(service).to receive(:add_private_note).with(['Test note']).and_call_original
expect(service).to receive(:send_message).with(['Test message']).and_call_original
expect(service).to receive(:send_attachment).with([1, 2]).and_call_original
service.perform
end
context 'when an action raises an error' do
let(:exception_tracker) { instance_spy(ChatwootExceptionTracker) }
before do
allow(ChatwootExceptionTracker).to receive(:new).and_return(exception_tracker)
end
it 'captures the exception' do
allow(service).to receive(:assign_agent).and_raise(StandardError.new('Random error'))
expect(exception_tracker).to receive(:capture_exception)
service.perform
end
end
end
end
describe '#assign_agent' do
context 'when agent_ids contains self' do
it 'updates the conversation assignee to the current user' do
service.send(:assign_agent, ['self'])
expect(conversation.reload.assignee).to eq(user)
end
end
context 'when agent_ids does not contain self' do
let(:other_user) { create(:user, account: account) }
before do
create(:inbox_member, user: other_user, inbox: conversation.inbox)
end
it 'calls the super method' do
service.send(:assign_agent, [other_user.id])
expect(conversation.reload.assignee).to eq(other_user)
end
end
end
describe '#add_private_note' do
context 'when conversation is not a tweet' do
it 'creates a new private message' do
expect do
service.send(:add_private_note, ['Test private note'])
end.to change(Message, :count).by(1)
message = Message.last
expect(message.content).to eq('Test private note')
expect(message.private).to be(true)
end
end
context 'when conversation is a tweet' do
before { allow(service).to receive(:conversation_a_tweet?).and_return(true) }
it 'does not create a new message' do
expect do
service.send(:add_private_note, ['Test private note'])
end.not_to change(Message, :count)
end
end
end
describe '#send_message' do
context 'when conversation is not a tweet' do
it 'creates a new public message' do
expect do
service.send(:send_message, ['Test message'])
end.to change(Message, :count).by(1)
message = Message.last
expect(message.content).to eq('Test message')
expect(message.private).to be(false)
end
end
context 'when conversation is a tweet' do
before { allow(service).to receive(:conversation_a_tweet?).and_return(true) }
it 'does not create a new message' do
expect do
service.send(:send_message, ['Test message'])
end.not_to change(Message, :count)
end
end
end
describe '#send_attachment' do
before do
macro.files.attach(io: Rails.root.join('spec/assets/avatar.png').open, filename: 'avatar.png', content_type: 'image/png')
macro.save!
end
context 'when conversation is not a tweet and macro has files attached' do
before { allow(service).to receive(:conversation_a_tweet?).and_return(false) }
it 'creates a new message with attachments' do
expect do
service.send(:send_attachment, [macro.files.first.blob_id])
end.to change(Message, :count).by(1)
message = Message.last
expect(message.attachments).to be_present
end
end
context 'when conversation is a tweet or macro has no files attached' do
before { allow(service).to receive(:conversation_a_tweet?).and_return(true) }
it 'does not create a new message' do
expect do
service.send(:send_attachment, [macro.files.first.blob_id])
end.not_to change(Message, :count)
end
end
end
end
|
Write RSpec test file for following ruby class
```ruby
# Delivers an outgoing Chatwoot message to Facebook Messenger through the
# facebook-messenger gem. The public entry point (#perform) lives in
# Base::SendOnChannelService; this class supplies the channel-specific hooks.
class Facebook::SendOnFacebookService < Base::SendOnChannelService
  private

  # Channel type the base class validates against before invoking #perform_reply.
  def channel_class
    Channel::FacebookPage
  end

  # Sends the text body and the first attachment (if any) as separate FB
  # deliveries. A FacebookError marks the message failed and may flag the
  # channel as having an authorization problem.
  def perform_reply
    send_message_to_facebook fb_text_message_params if message.content.present?
    send_message_to_facebook fb_attachment_message_params if message.attachments.present?
  rescue Facebook::Messenger::FacebookError => e
    # TODO : handle specific errors or else page will get disconnected
    handle_facebook_error(e)
    message.update!(status: :failed, external_error: e.message)
  end

  # Performs the HTTP delivery and records the outcome on the message:
  # an error payload sets status/external_error, a message_id becomes source_id.
  def send_message_to_facebook(delivery_params)
    result = Facebook::Messenger::Bot.deliver(delivery_params, page_id: channel.page_id)
    parsed_result = JSON.parse(result)
    if parsed_result['error'].present?
      message.update!(status: :failed, external_error: external_error(parsed_result))
      Rails.logger.info "Facebook::SendOnFacebookService: Error sending message to Facebook : Page - #{channel.page_id} : #{result}"
    end
    message.update!(source_id: parsed_result['message_id']) if parsed_result['message_id'].present?
  end

  # Payload for a plain-text send. MESSAGE_TAG + ACCOUNT_UPDATE allows replies
  # outside the standard 24-hour messaging window.
  def fb_text_message_params
    {
      recipient: { id: contact.get_source_id(inbox.id) },
      message: { text: message.content },
      messaging_type: 'MESSAGE_TAG',
      tag: 'ACCOUNT_UPDATE'
    }
  end

  # Formats "<code> - <message>" from a Graph API error body.
  # https://developers.facebook.com/docs/graph-api/guides/error-handling/
  def external_error(response)
    error_message = response['error']['message']
    error_code = response['error']['code']
    "#{error_code} - #{error_message}"
  end

  # Payload for an attachment send.
  # NOTE(review): only the first attachment is delivered here even though
  # #perform_reply checks attachments.present? — confirm whether
  # multi-attachment messages should loop instead.
  def fb_attachment_message_params
    attachment = message.attachments.first
    {
      recipient: { id: contact.get_source_id(inbox.id) },
      message: {
        attachment: {
          type: attachment_type(attachment),
          payload: {
            url: attachment.download_url
          }
        }
      },
      messaging_type: 'MESSAGE_TAG',
      tag: 'ACCOUNT_UPDATE'
    }
  end

  # Maps Chatwoot file types onto FB attachment types, defaulting to 'file'.
  def attachment_type(attachment)
    return attachment.file_type if %w[image audio video file].include? attachment.file_type

    'file'
  end

  # NOTE(review): not referenced anywhere in this class — possibly kept for
  # subclasses or dead code; confirm before removing.
  def fb_message_params
    if message.attachments.blank?
      fb_text_message_params
    else
      fb_attachment_message_params
    end
  end

  # we can send max 1 message after 24 hour window
  # NOTE(review): also unreferenced within this class.
  def sent_first_outgoing_message_after_24_hours?
    conversation.messages.outgoing.where('id > ?', conversation.last_incoming_message.id).count == 1
  end

  # Marks the channel as having an auth error when the token/session is invalid.
  # Refer: https://github.com/jgorset/facebook-messenger/blob/64fe1f5cef4c1e3fca295b205037f64dfebdbcab/lib/facebook/messenger/error.rb
  def handle_facebook_error(exception)
    return unless exception.to_s.include?('The session has been invalidated') || exception.to_s.include?('Error validating access token')

    channel.authorization_error!
  end
end
``` | require 'rails_helper'
# Specs for Facebook::SendOnFacebookService. The facebook-messenger Bot is
# replaced with a verified class double, so no network calls are made.
describe Facebook::SendOnFacebookService do
  subject(:send_reply_service) { described_class.new(message: message) }

  before do
    allow(Facebook::Messenger::Subscriptions).to receive(:subscribe).and_return(true)
    allow(bot).to receive(:deliver).and_return({ recipient_id: '1008372609250235', message_id: 'mid.1456970487936:c34767dfe57ee6e339' }.to_json)
    create(:message, message_type: :incoming, inbox: facebook_inbox, account: account, conversation: conversation)
  end

  let!(:account) { create(:account) }
  let(:bot) { class_double(Facebook::Messenger::Bot).as_stubbed_const }
  let!(:widget_inbox) { create(:inbox, account: account) }
  let!(:facebook_channel) { create(:channel_facebook_page, account: account) }
  let!(:facebook_inbox) { create(:inbox, channel: facebook_channel, account: account) }
  let!(:contact) { create(:contact, account: account) }
  let(:contact_inbox) { create(:contact_inbox, contact: contact, inbox: facebook_inbox) }
  let(:conversation) { create(:conversation, contact: contact, inbox: facebook_inbox, contact_inbox: contact_inbox) }

  describe '#perform' do
    # Cases where the service must NOT hit the Facebook API.
    context 'without reply' do
      it 'if message is private' do
        message = create(:message, message_type: 'outgoing', private: true, inbox: facebook_inbox, account: account)
        described_class.new(message: message).perform
        expect(bot).not_to have_received(:deliver)
      end

      it 'if inbox channel is not facebook page' do
        message = create(:message, message_type: 'outgoing', inbox: widget_inbox, account: account)
        expect { described_class.new(message: message).perform }.to raise_error 'Invalid channel service was called'
        expect(bot).not_to have_received(:deliver)
      end

      it 'if message is not outgoing' do
        message = create(:message, message_type: 'incoming', inbox: facebook_inbox, account: account)
        described_class.new(message: message).perform
        expect(bot).not_to have_received(:deliver)
      end

      # A source_id means the message already exists on FB (avoid echo loops).
      it 'if message has an FB ID' do
        message = create(:message, message_type: 'outgoing', inbox: facebook_inbox, account: account, source_id: SecureRandom.uuid)
        described_class.new(message: message).perform
        expect(bot).not_to have_received(:deliver)
      end
    end

    context 'with reply' do
      it 'if message is sent from chatwoot and is outgoing' do
        message = create(:message, message_type: 'outgoing', inbox: facebook_inbox, account: account, conversation: conversation)
        described_class.new(message: message).perform
        expect(bot).to have_received(:deliver)
      end

      it 'raise and exception to validate access token' do
        message = create(:message, message_type: 'outgoing', inbox: facebook_inbox, account: account, conversation: conversation)
        allow(bot).to receive(:deliver).and_raise(Facebook::Messenger::FacebookError.new('message' => 'Error validating access token'))
        described_class.new(message: message).perform
        expect(facebook_channel.authorization_error_count).to eq(1)
        expect(message.reload.status).to eq('failed')
        expect(message.reload.external_error).to eq('Error validating access token')
      end

      it 'if message with attachment is sent from chatwoot and is outgoing' do
        message = build(:message, message_type: 'outgoing', inbox: facebook_inbox, account: account, conversation: conversation)
        attachment = message.attachments.new(account_id: message.account_id, file_type: :image)
        attachment.file.attach(io: Rails.root.join('spec/assets/avatar.png').open, filename: 'avatar.png', content_type: 'image/png')
        message.save!
        allow(attachment).to receive(:download_url).and_return('url1')
        described_class.new(message: message).perform
        # Text part and attachment part are delivered as two separate calls.
        expect(bot).to have_received(:deliver).with({
          recipient: { id: contact_inbox.source_id },
          message: { text: message.content },
          messaging_type: 'MESSAGE_TAG',
          tag: 'ACCOUNT_UPDATE'
        }, { page_id: facebook_channel.page_id })
        expect(bot).to have_received(:deliver).with({
          recipient: { id: contact_inbox.source_id },
          message: {
            attachment: {
              type: 'image',
              payload: {
                url: 'url1'
              }
            }
          },
          messaging_type: 'MESSAGE_TAG',
          tag: 'ACCOUNT_UPDATE'
        }, { page_id: facebook_channel.page_id })
      end

      it 'if message sent from chatwoot is failed' do
        message = create(:message, message_type: 'outgoing', inbox: facebook_inbox, account: account, conversation: conversation)
        allow(bot).to receive(:deliver).and_return({ error: { message: 'Invalid OAuth access token.', type: 'OAuthException', code: 190,
                                                              fbtrace_id: 'BLBz/WZt8dN' } }.to_json)
        described_class.new(message: message).perform
        expect(bot).to have_received(:deliver)
        expect(message.reload.status).to eq('failed')
      end
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# ref : https://developers.line.biz/en/docs/messaging-api/receiving-messages/#webhook-event-types
# https://developers.line.biz/en/reference/messaging-api/#message-event
#
# Turns a LINE webhook payload into Chatwoot contact / conversation / message
# records for the given inbox.
class Line::IncomingMessageService
  include ::FileTypeHelper

  pattr_initialize [:inbox!, :params!]

  # Entry point. Bails out on verification pings (no events) and on payloads
  # whose sender profile has no userId.
  def perform
    # probably test events
    return if params[:events].blank?

    line_contact_info
    return if line_contact_info['userId'].blank?

    set_contact
    set_conversation
    parse_events
  end

  private

  # Creates one Chatwoot message per 'message' event; other event types
  # (follow, unfollow, ...) are ignored.
  def parse_events
    params[:events].each do |event|
      next unless message_created? event

      attach_files event['message']
    end
  end

  # Persists the incoming message and memoizes it in @message for attachment
  # handling. Returns the created record, or nil for non-message events.
  def message_created?(event)
    return unless event_type_message?(event)

    @message = @conversation.messages.create!(
      content: event['message']['text'],
      account_id: @inbox.account_id,
      inbox_id: @inbox.id,
      message_type: :incoming,
      sender: @contact,
      source_id: event['message']['id'].to_s
    )
    @message
  end

  # Downloads media for image/audio/video messages via the LINE client and
  # attaches it to the message created above. File extension is derived from
  # the response content type (e.g. image/png -> .png).
  def attach_files(message)
    return unless message_type_non_text?(message['type'])

    response = inbox.channel.client.get_message_content(message['id'])
    file_name = "media-#{message['id']}.#{response.content_type.split('/')[1]}"
    temp_file = Tempfile.new(file_name)
    temp_file.binmode
    temp_file << response.body
    temp_file.rewind
    @message.attachments.new(
      account_id: @message.account_id,
      file_type: file_content_type(response),
      file: {
        io: temp_file,
        filename: file_name,
        content_type: response.content_type
      }
    )
    @message.save!
  end

  def event_type_message?(event)
    event['type'] == 'message'
  end

  # True for the media message types whose binary content must be fetched.
  def message_type_non_text?(type)
    [Line::Bot::Event::MessageType::Video, Line::Bot::Event::MessageType::Audio, Line::Bot::Event::MessageType::Image].include?(type)
  end

  def account
    @account ||= inbox.account
  end

  # Fetches (once) the LINE profile of the first event's sender.
  # NOTE(review): assumes every event in the batch comes from the same user —
  # confirm against LINE's webhook batching behaviour.
  def line_contact_info
    @line_contact_info ||= JSON.parse(inbox.channel.client.get_profile(params[:events].first['source']['userId']).body)
  end

  # Finds or creates the contact + contact_inbox for the LINE user.
  def set_contact
    contact_inbox = ::ContactInboxWithContactBuilder.new(
      source_id: line_contact_info['userId'],
      inbox: inbox,
      contact_attributes: contact_attributes
    ).perform
    @contact_inbox = contact_inbox
    @contact = contact_inbox.contact
  end

  def conversation_params
    {
      account_id: @inbox.account_id,
      inbox_id: @inbox.id,
      contact_id: @contact.id,
      contact_inbox_id: @contact_inbox.id
    }
  end

  # Reuses the first existing conversation for the contact inbox or starts one.
  def set_conversation
    @conversation = @contact_inbox.conversations.first
    return if @conversation

    @conversation = ::Conversation.create!(conversation_params)
  end

  def contact_attributes
    {
      name: line_contact_info['displayName'],
      avatar_url: line_contact_info['pictureUrl']
    }
  end

  # Maps a MIME content type onto Chatwoot's file_type enum (FileTypeHelper).
  def file_content_type(file_content)
    file_type(file_content.content_type)
  end
end
``` | require 'rails_helper'
# Specs for Line::IncomingMessageService covering text, image and video
# webhook payloads. The LINE SDK client and profile lookup are stubbed.
describe Line::IncomingMessageService do
  let!(:line_channel) { create(:channel_line) }
  # A text 'message' event plus a 'follow' event (which must be ignored).
  let(:params) do
    {
      'destination': '2342234234',
      'events': [
        {
          'replyToken': '0f3779fba3b349968c5d07db31eab56f',
          'type': 'message',
          'mode': 'active',
          'timestamp': 1_462_629_479_859,
          'source': {
            'type': 'user',
            'userId': 'U4af4980629'
          },
          'message': {
            'id': '325708',
            'type': 'text',
            'text': 'Hello, world'
          }
        },
        {
          'replyToken': '8cf9239d56244f4197887e939187e19e',
          'type': 'follow',
          'mode': 'active',
          'timestamp': 1_462_629_479_859,
          'source': {
            'type': 'user',
            'userId': 'U4af4980629'
          }
        }
      ]
    }.with_indifferent_access
  end
  let(:image_params) do
    {
      'destination': '2342234234',
      'events': [
        {
          'replyToken': '0f3779fba3b349968c5d07db31eab56f',
          'type': 'message',
          'mode': 'active',
          'timestamp': 1_462_629_479_859,
          'source': {
            'type': 'user',
            'userId': 'U4af4980629'
          },
          'message': {
            'type': 'image',
            'id': '354718',
            'contentProvider': {
              'type': 'line'
            }
          }
        },
        {
          'replyToken': '8cf9239d56244f4197887e939187e19e',
          'type': 'follow',
          'mode': 'active',
          'timestamp': 1_462_629_479_859,
          'source': {
            'type': 'user',
            'userId': 'U4af4980629'
          }
        }
      ]
    }.with_indifferent_access
  end
  let(:video_params) do
    {
      'destination': '2342234234',
      'events': [
        {
          'replyToken': '0f3779fba3b349968c5d07db31eab56f',
          'type': 'message',
          'mode': 'active',
          'timestamp': 1_462_629_479_859,
          'source': {
            'type': 'user',
            'userId': 'U4af4980629'
          },
          'message': {
            'type': 'video',
            'id': '354718',
            'contentProvider': {
              'type': 'line'
            }
          }
        },
        {
          'replyToken': '8cf9239d56244f4197887e939187e19e',
          'type': 'follow',
          'mode': 'active',
          'timestamp': 1_462_629_479_859,
          'source': {
            'type': 'user',
            'userId': 'U4af4980629'
          }
        }
      ]
    }.with_indifferent_access
  end

  describe '#perform' do
    context 'when valid text message params' do
      it 'creates appropriate conversations, message and contacts' do
        line_bot = double
        line_user_profile = double
        allow(Line::Bot::Client).to receive(:new).and_return(line_bot)
        allow(line_bot).to receive(:get_profile).and_return(line_user_profile)
        allow(line_user_profile).to receive(:body).and_return(
          {
            'displayName': 'LINE Test',
            'userId': 'U4af4980629',
            'pictureUrl': 'https://test.com'
          }.to_json
        )
        described_class.new(inbox: line_channel.inbox, params: params).perform
        expect(line_channel.inbox.conversations).not_to eq(0)
        expect(Contact.all.first.name).to eq('LINE Test')
        expect(line_channel.inbox.messages.first.content).to eq('Hello, world')
      end
    end

    context 'when valid image message params' do
      it 'creates appropriate conversations, message and contacts' do
        line_bot = double
        line_user_profile = double
        allow(Line::Bot::Client).to receive(:new).and_return(line_bot)
        allow(line_bot).to receive(:get_profile).and_return(line_user_profile)
        file = fixture_file_upload(Rails.root.join('spec/assets/avatar.png'), 'image/png')
        allow(line_bot).to receive(:get_message_content).and_return(
          OpenStruct.new({
            body: Base64.encode64(file.read),
            content_type: 'image/png'
          })
        )
        allow(line_user_profile).to receive(:body).and_return(
          {
            'displayName': 'LINE Test',
            'userId': 'U4af4980629',
            'pictureUrl': 'https://test.com'
          }.to_json
        )
        described_class.new(inbox: line_channel.inbox, params: image_params).perform
        expect(line_channel.inbox.conversations).not_to eq(0)
        expect(Contact.all.first.name).to eq('LINE Test')
        # Media messages carry no text; content lives in the attachment.
        expect(line_channel.inbox.messages.first.content).to be_nil
        expect(line_channel.inbox.messages.first.attachments.first.file_type).to eq('image')
        expect(line_channel.inbox.messages.first.attachments.first.file.blob.filename.to_s).to eq('media-354718.png')
      end
    end

    context 'when valid video message params' do
      it 'creates appropriate conversations, message and contacts' do
        line_bot = double
        line_user_profile = double
        allow(Line::Bot::Client).to receive(:new).and_return(line_bot)
        allow(line_bot).to receive(:get_profile).and_return(line_user_profile)
        file = fixture_file_upload(Rails.root.join('spec/assets/sample.mp4'), 'video/mp4')
        allow(line_bot).to receive(:get_message_content).and_return(
          OpenStruct.new({
            body: Base64.encode64(file.read),
            content_type: 'video/mp4'
          })
        )
        allow(line_user_profile).to receive(:body).and_return(
          {
            'displayName': 'LINE Test',
            'userId': 'U4af4980629',
            'pictureUrl': 'https://test.com'
          }.to_json
        )
        described_class.new(inbox: line_channel.inbox, params: video_params).perform
        expect(line_channel.inbox.conversations).not_to eq(0)
        expect(Contact.all.first.name).to eq('LINE Test')
        expect(line_channel.inbox.messages.first.content).to be_nil
        expect(line_channel.inbox.messages.first.attachments.first.file_type).to eq('video')
        expect(line_channel.inbox.messages.first.attachments.first.file.blob.filename.to_s).to eq('media-354718.mp4')
      end
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# Pushes an outgoing Chatwoot message to the LINE Messaging API and mirrors
# the delivery outcome back onto the message record. The public entry point
# (#perform) is provided by Base::SendOnChannelService.
class Line::SendOnLineService < Base::SendOnChannelService
  private

  # Channel type the base class validates before calling #perform_reply.
  def channel_class
    Channel::Line
  end

  # Delivers the payload and records the result: HTTP 200 marks the message
  # delivered, anything else marks it failed with a formatted error string.
  def perform_reply
    response = channel.client.push_message(message.conversation.contact_inbox.source_id, build_payload)
    return if response.blank?

    parsed_body = JSON.parse(response.body)
    return message.update!(status: :delivered) if response.code == '200'

    message.update!(status: :failed, external_error: external_error(parsed_body))
  end

  # Chooses the outgoing payload shape: text only, attachments only, or a
  # combined array of text followed by the attachment messages.
  def build_payload
    return text_message if message.attachments.none?

    message.content.nil? ? attachments : [text_message, *attachments]
  end

  # One LINE message object per supported attachment. Only image and video are
  # supported for now: https://developers.line.biz/en/reference/messaging-api/#image-message
  def attachments
    message.attachments.map do |attachment|
      next unless %w[image video].include?(attachment.file_type)

      {
        type: attachment.file_type,
        originalContentUrl: attachment.download_url,
        previewImageUrl: attachment.download_url
      }
    end
  end

  # https://developers.line.biz/en/reference/messaging-api/#text-message
  def text_message
    {
      type: 'text',
      text: message.content
    }
  end

  # Formats a LINE error body ("message" plus optional "details" entries) into
  # one comma-joined string.
  # https://developers.line.biz/en/reference/messaging-api/#error-responses
  def external_error(error)
    details = error['details']
    return error['message'] if details.blank?

    detail_messages = details.map { |detail| "#{detail['property']}: #{detail['message']}" }
    # Array#join flattens the nested array: "msg, prop1: m1, prop2: m2".
    [error['message'], detail_messages].join(', ')
  end
end
``` | require 'rails_helper'
# Specs for Line::SendOnLineService. The LINE SDK client is a double; HTTP
# responses are simulated with OpenStruct(code, body).
describe Line::SendOnLineService do
  describe '#perform' do
    let(:line_client) { double }
    let(:line_channel) { create(:channel_line) }
    let(:message) do
      create(:message, message_type: :outgoing, content: 'test',
                       conversation: create(:conversation, inbox: line_channel.inbox))
    end

    before do
      allow(Line::Bot::Client).to receive(:new).and_return(line_client)
    end

    context 'when message send' do
      it 'calls @channel.client.push_message' do
        allow(line_client).to receive(:push_message)
        expect(line_client).to receive(:push_message)
        described_class.new(message: message).perform
      end
    end

    context 'when message send fails without details' do
      let(:error_response) do
        {
          'message' => 'The request was invalid'
        }.to_json
      end

      before do
        allow(line_client).to receive(:push_message).and_return(OpenStruct.new(code: '400', body: error_response))
      end

      it 'updates the message status to failed' do
        described_class.new(message: message).perform
        message.reload
        expect(message.status).to eq('failed')
      end

      it 'updates the external error without details' do
        described_class.new(message: message).perform
        message.reload
        expect(message.external_error).to eq('The request was invalid')
      end
    end

    context 'when message send fails with details' do
      let(:error_response) do
        {
          'message' => 'The request was invalid',
          'details' => [
            {
              'property' => 'messages[0].text',
              'message' => 'May not be empty'
            }
          ]
        }.to_json
      end

      before do
        allow(line_client).to receive(:push_message).and_return(OpenStruct.new(code: '400', body: error_response))
      end

      it 'updates the message status to failed' do
        described_class.new(message: message).perform
        message.reload
        expect(message.status).to eq('failed')
      end

      it 'updates the external error with details' do
        described_class.new(message: message).perform
        message.reload
        expect(message.external_error).to eq('The request was invalid, messages[0].text: May not be empty')
      end
    end

    context 'when message send succeeds' do
      let(:success_response) do
        {
          'message' => 'ok'
        }.to_json
      end

      before do
        allow(line_client).to receive(:push_message).and_return(OpenStruct.new(code: '200', body: success_response))
      end

      it 'updates the message status to delivered' do
        described_class.new(message: message).perform
        message.reload
        expect(message.status).to eq('delivered')
      end
    end

    context 'with message attachments' do
      it 'sends the message with text and attachments' do
        attachment = message.attachments.new(account_id: message.account_id, file_type: :image)
        attachment.file.attach(io: Rails.root.join('spec/assets/avatar.png').open, filename: 'avatar.png', content_type: 'image/png')
        # Disk-service URLs are signed, so match on a pattern rather than equality.
        expected_url_regex = %r{rails/active_storage/disk/[a-zA-Z0-9=_\-+]+/avatar\.png}
        expect(line_client).to receive(:push_message).with(
          message.conversation.contact_inbox.source_id,
          [
            { type: 'text', text: message.content },
            {
              type: 'image',
              originalContentUrl: match(expected_url_regex),
              previewImageUrl: match(expected_url_regex)
            }
          ]
        )
        described_class.new(message: message).perform
      end

      it 'sends the message with attachments only' do
        attachment = message.attachments.new(account_id: message.account_id, file_type: :image)
        attachment.file.attach(io: Rails.root.join('spec/assets/avatar.png').open, filename: 'avatar.png', content_type: 'image/png')
        message.update!(content: nil)
        expected_url_regex = %r{rails/active_storage/disk/[a-zA-Z0-9=_\-+]+/avatar\.png}
        expect(line_client).to receive(:push_message).with(
          message.conversation.contact_inbox.source_id,
          [
            {
              type: 'image',
              originalContentUrl: match(expected_url_regex),
              previewImageUrl: match(expected_url_regex)
            }
          ]
        )
        described_class.new(message: message).perform
      end

      it 'sends the message with text only' do
        message.attachments.destroy_all
        expect(line_client).to receive(:push_message).with(
          message.conversation.contact_inbox.source_id,
          { type: 'text', text: message.content }
        )
        described_class.new(message: message).perform
      end
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# Builds and runs a filtered conversation query from a "custom view" style
# filter payload (@params[:payload]) and returns a page of results plus
# mine/assigned/unassigned/all tab counts.
class Conversations::FilterService < FilterService
  ATTRIBUTE_MODEL = 'conversation_attribute'.freeze

  # filter_account lets callers (e.g. background jobs) run without
  # Current.account being set.
  def initialize(params, user, filter_account = nil)
    @account = filter_account || Current.account
    super(params, user)
  end

  # Returns { conversations:, count: { mine_count:, assigned_count:,
  # unassigned_count:, all_count: } } for the filtered relation.
  def perform
    @conversations = conversation_query_builder
    mine_count, unassigned_count, all_count, = set_count_for_all_conversations
    assigned_count = all_count - unassigned_count

    {
      conversations: conversations,
      count: {
        mine_count: mine_count,
        assigned_count: assigned_count,
        unassigned_count: unassigned_count,
        all_count: all_count
      }
    }
  end

  # Folds every query_hash in the payload into one SQL condition string.
  # @query_string / @filter_values are accumulated by FilterService helpers
  # (filter_operation, custom_attribute_query, tag_filter_query).
  def conversation_query_builder
    conversation_filters = @filters['conversations']
    @params[:payload].each_with_index do |query_hash, current_index|
      current_filter = conversation_filters[query_hash['attribute_key']]
      @query_string += conversation_query_string(current_filter, query_hash, current_index)
    end
    base_relation.where(@query_string, @filter_values.with_indifferent_access)
  end

  # Produces the SQL fragment for one filter entry. Keys not present in the
  # predefined filter config fall through to custom-attribute handling.
  # NOTE(review): attribute_key is interpolated directly into SQL — this is
  # safe only if keys are validated against @filters / defined custom
  # attributes upstream; confirm that validation exists.
  def conversation_query_string(current_filter, query_hash, current_index)
    attribute_key = query_hash[:attribute_key]
    query_operator = query_hash[:query_operator]
    filter_operator_value = filter_operation(query_hash, current_index)

    return custom_attribute_query(query_hash, 'conversation_attribute', current_index) if current_filter.nil?

    case current_filter['attribute_type']
    when 'additional_attributes'
      " conversations.additional_attributes ->> '#{attribute_key}' #{filter_operator_value} #{query_operator} "
    when 'date_attributes'
      " (conversations.#{attribute_key})::#{current_filter['data_type']} #{filter_operator_value}#{current_filter['data_type']} #{query_operator} "
    when 'standard'
      if attribute_key == 'labels'
        " #{tag_filter_query('Conversation', 'conversations', query_hash, current_index)} "
      else
        " conversations.#{attribute_key} #{filter_operator_value} #{query_operator} "
      end
    end
  end

  # Eager-loads everything the conversation list UI renders per row.
  def base_relation
    @account.conversations.includes(
      :taggings, :inbox, { assignee: { avatar_attachment: [:blob] } }, { contact: { avatar_attachment: [:blob] } }, :team, :messages, :contact_inbox
    )
  end

  def current_page
    @params[:page] || 1
  end

  # Applies ordering + pagination to the filtered relation.
  def conversations
    @conversations.latest.page(current_page)
  end
end
``` | require 'rails_helper'
# Specs for Conversations::FilterService covering standard attributes,
# additional_attributes, custom attributes, labels and date filters.
describe Conversations::FilterService do
  subject(:filter_service) { described_class }

  let!(:account) { create(:account) }
  let!(:user_1) { create(:user, account: account) }
  let!(:user_2) { create(:user, account: account) }
  let!(:campaign_1) { create(:campaign, title: 'Test Campaign', account: account) }
  let!(:campaign_2) { create(:campaign, title: 'Campaign', account: account) }
  let!(:inbox) { create(:inbox, account: account, enable_auto_assignment: false) }
  let!(:user_2_assigned_conversation) { create(:conversation, account: account, inbox: inbox, assignee: user_2) }
  let!(:en_conversation_1) do
    create(:conversation, account: account, inbox: inbox, assignee: user_1, campaign_id: campaign_1.id,
                          status: 'pending', additional_attributes: { 'browser_language': 'en' })
  end
  let!(:en_conversation_2) do
    create(:conversation, account: account, inbox: inbox, assignee: user_1, campaign_id: campaign_2.id,
                          status: 'pending', additional_attributes: { 'browser_language': 'en' })
  end

  before do
    create(:inbox_member, user: user_1, inbox: inbox)
    create(:inbox_member, user: user_2, inbox: inbox)
    Current.account = account

    en_conversation_1.update!(custom_attributes: { conversation_additional_information: 'test custom data' })
    en_conversation_2.update!(custom_attributes: { conversation_additional_information: 'test custom data', conversation_type: 'platinum' })
    user_2_assigned_conversation.update!(custom_attributes: { conversation_type: 'platinum', conversation_created: '2022-01-19' })
    create(:conversation, account: account, inbox: inbox, assignee: user_1)

    # Custom attribute definitions the filter payloads below refer to.
    create(:custom_attribute_definition,
           attribute_key: 'conversation_type',
           account: account,
           attribute_model: 'conversation_attribute',
           attribute_display_type: 'list',
           attribute_values: %w[regular platinum gold])
    create(:custom_attribute_definition,
           attribute_key: 'conversation_created',
           account: account,
           attribute_model: 'conversation_attribute',
           attribute_display_type: 'date')
    create(:custom_attribute_definition,
           attribute_key: 'conversation_additional_information',
           account: account,
           attribute_model: 'conversation_attribute',
           attribute_display_type: 'text')
  end

  describe '#perform' do
    context 'with query present' do
      let!(:params) { { payload: [], page: 1 } }
      let(:payload) do
        [
          {
            attribute_key: 'browser_language',
            filter_operator: 'contains',
            values: 'en',
            query_operator: 'AND',
            custom_attribute_type: ''
          }.with_indifferent_access,
          {
            attribute_key: 'status',
            filter_operator: 'not_equal_to',
            values: %w[resolved],
            query_operator: nil,
            custom_attribute_type: ''
          }.with_indifferent_access
        ]
      end

      it 'filter conversations by additional_attributes and status' do
        params[:payload] = payload
        result = filter_service.new(params, user_1).perform
        conversations = Conversation.where("additional_attributes ->> 'browser_language' IN (?) AND status IN (?)", ['en'], [1, 2])
        expect(result[:count][:all_count]).to be conversations.count
      end

      it 'filter conversations by additional_attributes and status with pagination' do
        params[:payload] = payload
        params[:page] = 2
        result = filter_service.new(params, user_1).perform
        conversations = Conversation.where("additional_attributes ->> 'browser_language' IN (?) AND status IN (?)", ['en'], [1, 2])
        expect(result[:count][:all_count]).to be conversations.count
      end

      it 'filters items with contains filter_operator with values being an array' do
        params[:payload] = [{
          attribute_key: 'browser_language',
          filter_operator: 'contains',
          values: %w[tr fr],
          query_operator: '',
          custom_attribute_type: ''
        }.with_indifferent_access]
        create(:conversation, account: account, inbox: inbox, assignee: user_1, campaign_id: campaign_1.id,
                              status: 'pending', additional_attributes: { 'browser_language': 'fr' })
        create(:conversation, account: account, inbox: inbox, assignee: user_1, campaign_id: campaign_1.id,
                              status: 'pending', additional_attributes: { 'browser_language': 'tr' })
        result = filter_service.new(params, user_1).perform
        expect(result[:count][:all_count]).to be 2
      end

      it 'filters items with does not contain filter operator with values being an array' do
        params[:payload] = [{
          attribute_key: 'browser_language',
          filter_operator: 'does_not_contain',
          values: %w[tr en],
          query_operator: '',
          custom_attribute_type: ''
        }.with_indifferent_access]
        create(:conversation, account: account, inbox: inbox, assignee: user_1, campaign_id: campaign_1.id,
                              status: 'pending', additional_attributes: { 'browser_language': 'fr' })
        create(:conversation, account: account, inbox: inbox, assignee: user_1, campaign_id: campaign_1.id,
                              status: 'pending', additional_attributes: { 'browser_language': 'tr' })
        result = filter_service.new(params, user_1).perform
        expect(result[:count][:all_count]).to be 1
        expect(result[:conversations].first.additional_attributes['browser_language']).to eq 'fr'
      end

      it 'filter conversations by additional_attributes with NOT_IN filter' do
        payload = [{ attribute_key: 'conversation_type', filter_operator: 'not_equal_to', values: 'platinum', query_operator: nil,
                     custom_attribute_type: 'conversation_attribute' }.with_indifferent_access]
        params[:payload] = payload
        result = filter_service.new(params, user_1).perform
        # NOT_IN must also match rows where the attribute is absent (NULL).
        conversations = Conversation.where(
          "custom_attributes ->> 'conversation_type' NOT IN (?) OR custom_attributes ->> 'conversation_type' IS NULL", ['platinum']
        )
        expect(result[:count][:all_count]).to be conversations.count
      end

      it 'filter conversations by tags' do
        user_2_assigned_conversation.update_labels('support')
        params[:payload] = [
          {
            attribute_key: 'assignee_id',
            filter_operator: 'equal_to',
            values: [user_1.id, user_2.id],
            query_operator: 'AND'
          }.with_indifferent_access,
          {
            attribute_key: 'labels',
            filter_operator: 'equal_to',
            values: ['support'],
            query_operator: 'AND'
          }.with_indifferent_access,
          {
            attribute_key: 'labels',
            filter_operator: 'not_equal_to',
            values: ['random-label'],
            query_operator: nil
          }.with_indifferent_access
        ]
        result = filter_service.new(params, user_1).perform
        expect(result[:count][:all_count]).to be 1
      end

      it 'filter conversations by is_present filter_operator' do
        params[:payload] = [
          {
            attribute_key: 'assignee_id',
            filter_operator: 'equal_to',
            values: [
              user_1.id,
              user_2.id
            ],
            query_operator: 'AND',
            custom_attribute_type: ''
          }.with_indifferent_access,
          {
            attribute_key: 'campaign_id',
            filter_operator: 'is_present',
            values: [],
            query_operator: nil,
            custom_attribute_type: ''
          }.with_indifferent_access
        ]
        result = filter_service.new(params, user_1).perform
        expect(result[:count][:all_count]).to be 2
        expect(result[:conversations].pluck(:campaign_id).sort).to eq [campaign_2.id, campaign_1.id].sort
      end
    end
  end

  describe '#perform on custom attribute' do
    context 'with query present' do
      let!(:params) { { payload: [], page: 1 } }

      it 'filter by custom_attributes and labels' do
        user_2_assigned_conversation.update_labels('support')
        params[:payload] = [
          {
            attribute_key: 'conversation_type',
            filter_operator: 'equal_to',
            values: ['platinum'],
            query_operator: 'AND'
          }.with_indifferent_access,
          {
            attribute_key: 'conversation_created',
            filter_operator: 'is_less_than',
            values: ['2022-01-20'],
            query_operator: 'OR',
            custom_attribute_type: ''
          }.with_indifferent_access,
          {
            attribute_key: 'labels',
            filter_operator: 'equal_to',
            values: ['support'],
            query_operator: nil
          }.with_indifferent_access
        ]
        result = filter_service.new(params, user_1).perform
        expect(result[:conversations].length).to be 1
        expect(result[:conversations][0][:id]).to be user_2_assigned_conversation.id
      end

      it 'filter by custom_attributes and labels with custom_attribute_type nil' do
        user_2_assigned_conversation.update_labels('support')
        params[:payload] = [
          {
            attribute_key: 'conversation_type',
            filter_operator: 'equal_to',
            values: ['platinum'],
            query_operator: 'AND'
          }.with_indifferent_access,
          {
            attribute_key: 'conversation_created',
            filter_operator: 'is_less_than',
            values: ['2022-01-20'],
            query_operator: 'OR',
            custom_attribute_type: nil
          }.with_indifferent_access,
          {
            attribute_key: 'labels',
            filter_operator: 'equal_to',
            values: ['support'],
            query_operator: nil
          }.with_indifferent_access
        ]
        result = filter_service.new(params, user_1).perform
        expect(result[:conversations].length).to be 1
        expect(result[:conversations][0][:id]).to be user_2_assigned_conversation.id
      end

      it 'filter by custom_attributes' do
        params[:payload] = [
          {
            attribute_key: 'conversation_type',
            filter_operator: 'equal_to',
            values: ['platinum'],
            query_operator: 'AND',
            custom_attribute_type: ''
          }.with_indifferent_access,
          {
            attribute_key: 'conversation_created',
            filter_operator: 'is_less_than',
            values: ['2022-01-20'],
            query_operator: nil,
            custom_attribute_type: ''
          }.with_indifferent_access
        ]
        result = filter_service.new(params, user_1).perform
        expect(result[:conversations].length).to be 1
      end

      it 'filter by custom_attributes with custom_attribute_type nil' do
        params[:payload] = [
          {
            attribute_key: 'conversation_type',
            filter_operator: 'equal_to',
            values: ['platinum'],
            query_operator: 'AND',
            custom_attribute_type: nil
          }.with_indifferent_access,
          {
            attribute_key: 'conversation_created',
            filter_operator: 'is_less_than',
            values: ['2022-01-20'],
            query_operator: nil,
            custom_attribute_type: nil
          }.with_indifferent_access
        ]
        result = filter_service.new(params, user_1).perform
        expect(result[:conversations].length).to be 1
      end

      it 'filter by custom_attributes and additional_attributes' do
        params[:payload] = [
          {
            attribute_key: 'conversation_type',
            filter_operator: 'equal_to',
            values: ['platinum'],
            query_operator: 'AND',
            custom_attribute_type: ''
          }.with_indifferent_access,
          {
            attribute_key: 'browser_language',
            filter_operator: 'is_equal_to',
            values: 'en',
            query_operator: nil,
            custom_attribute_type: ''
          }.with_indifferent_access
        ]
        result = filter_service.new(params, user_1).perform
        expect(result[:conversations].length).to be 1
      end
    end
  end

  describe '#perform on date filter' do
    context 'with query present' do
      let!(:params) { { payload: [], page: 1 } }

      it 'filter by created_at' do
        params[:payload] = [
          {
            attribute_key: 'created_at',
            filter_operator: 'is_greater_than',
            values: ['2022-01-20'],
            query_operator: nil,
            custom_attribute_type: ''
          }.with_indifferent_access
        ]
        result = filter_service.new(params, user_1).perform
        expected_count = Conversation.where('created_at > ?', DateTime.parse('2022-01-20')).count
        expect(result[:conversations].length).to be expected_count
      end

      it 'filter by created_at and conversation_type' do
        params[:payload] = [
          {
            attribute_key: 'conversation_type',
            filter_operator: 'equal_to',
            values: ['platinum'],
            query_operator: 'AND',
            custom_attribute_type: ''
          }.with_indifferent_access,
          {
            attribute_key: 'created_at',
            filter_operator: 'is_greater_than',
            values: ['2022-01-20'],
            query_operator: nil,
            custom_attribute_type: ''
          }.with_indifferent_access
        ]
        result = filter_service.new(params, user_1).perform
        expected_count = Conversation.where("created_at > ? AND custom_attributes->>'conversation_type' = ?", DateTime.parse('2022-01-20'),
                                            'platinum').count
        expect(result[:conversations].length).to be expected_count
      end

      context 'with x_days_before filter' do
        before do
          Time.zone = 'UTC'
          en_conversation_1.update!(last_activity_at: (Time.zone.today - 4.days))
          en_conversation_2.update!(last_activity_at: (Time.zone.today - 5.days))
          user_2_assigned_conversation.update!(last_activity_at: (Time.zone.today - 2.days))
        end

        it 'filter by last_activity_at 3_days_before and custom_attributes' do
          params[:payload] = [
            {
              attribute_key: 'last_activity_at',
              filter_operator: 'days_before',
              values: [3],
              query_operator: 'AND',
              custom_attribute_type: ''
            }.with_indifferent_access,
            {
              attribute_key: 'conversation_type',
              filter_operator: 'equal_to',
              values: ['platinum'],
              query_operator: nil,
              custom_attribute_type: ''
            }.with_indifferent_access
          ]
          expected_count = Conversation.where("last_activity_at < ? AND custom_attributes->>'conversation_type' = ?", (Time.zone.today - 3.days),
                                              'platinum').count
          result = filter_service.new(params, user_1).perform
          expect(result[:conversations].length).to be expected_count
        end

        it 'filter by last_activity_at 2_days_before' do
          params[:payload] = [
            {
              attribute_key: 'last_activity_at',
              filter_operator: 'days_before',
              values: [3],
              query_operator: nil,
              custom_attribute_type: ''
            }.with_indifferent_access
          ]
          expected_count = Conversation.where('last_activity_at < ?', (Time.zone.today - 2.days)).count
          result = filter_service.new(params, user_1).perform
          expect(result[:conversations].length).to be expected_count
        end
      end
    end
  end

  # Exercises the filter_account constructor fallback when Current.account is nil.
  describe '#perform on date filter with no current account' do
    before do
      Current.account = nil
    end

    context 'with query present' do
      let!(:params) { { payload: [], page: 1 } }

      it 'filter by created_at' do
        params[:payload] = [
          {
            attribute_key: 'created_at',
            filter_operator: 'is_greater_than',
            values: ['2022-01-20'],
            query_operator: nil,
            custom_attribute_type: ''
          }.with_indifferent_access
        ]
        result = filter_service.new(params, user_1, account).perform
        expected_count = Conversation.where('created_at > ?', DateTime.parse('2022-01-20')).count
        expect(Current.account).to be_nil
        expect(result[:conversations].length).to be expected_count
      end
    end
  end
end
|
Write RSpec test file for following ruby class
```ruby
# Executes the actions attached to an automation rule against a conversation.
# Each action hash carries an :action_name (dispatched via `send` to a private
# method below) and :action_params. Failures in individual actions are captured
# and reported, so one failing action does not abort the rest.
class AutomationRules::ActionService < ActionService
def initialize(rule, account, conversation)
super(conversation)
@rule = rule
@account = account
# Tag the request-local context so downstream code can attribute changes
# to this rule rather than a human user.
Current.executed_by = rule
end
# Runs every action of the rule in order. The conversation is reloaded before
# each action because a previous action may have mutated it.
def perform
@rule.actions.each do |action|
@conversation.reload
action = action.with_indifferent_access
begin
# Dynamic dispatch to one of the private action methods below.
send(action[:action_name], action[:action_params])
rescue StandardError => e
# Report and continue with the remaining actions.
ChatwootExceptionTracker.new(e, account: @account).capture_exception
end
end
ensure
# Always clear the request-local context, even if iteration raised.
Current.reset
end
private
# Sends the rule's attached files as a message. Skipped for tweets and when
# the rule has no attachments or the given blob ids resolve to nothing.
def send_attachment(blob_ids)
return if conversation_a_tweet?
return unless @rule.files.attached?
blobs = ActiveStorage::Blob.where(id: blob_ids)
return if blobs.blank?
params = { content: nil, private: false, attachments: blobs }
Messages::MessageBuilder.new(nil, @conversation, params).perform
end
# Enqueues a webhook delivery with the conversation payload; the action
# params are a one-element array holding the URL.
def send_webhook_event(webhook_url)
payload = @conversation.webhook_data.merge(event: "automation_event.#{@rule.event_name}")
WebhookJob.perform_later(webhook_url[0], payload)
end
# Posts a public message (first element of the params array) tagged with the
# originating rule id. Skipped for tweets.
def send_message(message)
return if conversation_a_tweet?
params = { content: message[0], private: false, content_attributes: { automation_rule_id: @rule.id } }
Messages::MessageBuilder.new(nil, @conversation, params).perform
end
# Emails each team listed in params[0][:team_ids] about the conversation.
# Delivery is synchronous (deliver_now).
def send_email_to_team(params)
teams = Team.where(id: params[0][:team_ids])
teams.each do |team|
TeamNotifications::AutomationNotificationMailer.conversation_creation(@conversation, team, params[0][:message])&.deliver_now
end
end
end
``` | require 'rails_helper'
# Specs for AutomationRules::ActionService: each describe block exercises one
# of the dynamically dispatched action methods via the public #perform.
RSpec.describe AutomationRules::ActionService do
let(:account) { create(:account) }
let(:agent) { create(:user, account: account) }
let(:conversation) { create(:conversation, account: account) }
# Default rule carries two actions so the dispatch loop is exercised.
let!(:rule) do
create(:automation_rule, account: account,
actions: [
{ action_name: 'send_webhook_event', action_params: ['https://example.com'] },
{ action_name: 'send_message', action_params: { message: 'Hello' } }
])
end
describe '#perform' do
context 'when actions are defined in the rule' do
it 'will call the actions' do
expect(Messages::MessageBuilder).to receive(:new)
expect(WebhookJob).to receive(:perform_later)
described_class.new(rule, account, conversation).perform
end
end
describe '#perform with send_attachment action' do
let(:message_builder) { double }
before do
allow(Messages::MessageBuilder).to receive(:new).and_return(message_builder)
# Strip the default send_message action so only the attachment path fires.
rule.actions.delete_if { |a| a['action_name'] == 'send_message' }
rule.files.attach(io: Rails.root.join('spec/assets/avatar.png').open, filename: 'avatar.png', content_type: 'image/png')
rule.save!
rule.actions << { action_name: 'send_attachment', action_params: [rule.files.first.blob_id] }
end
it 'will send attachment' do
expect(message_builder).to receive(:perform)
described_class.new(rule, account, conversation).perform
end
it 'will not send attachment is conversation is a tweet' do
twitter_inbox = create(:inbox, channel: create(:channel_twitter_profile, account: account))
conversation = create(:conversation, inbox: twitter_inbox, additional_attributes: { type: 'tweet' })
expect(message_builder).not_to receive(:perform)
described_class.new(rule, account, conversation).perform
end
end
describe '#perform with send_webhook_event action' do
it 'will send webhook event' do
expect(rule.actions.pluck('action_name')).to include('send_webhook_event')
expect(WebhookJob).to receive(:perform_later)
described_class.new(rule, account, conversation).perform
end
end
describe '#perform with send_message action' do
let(:message_builder) { double }
before do
allow(Messages::MessageBuilder).to receive(:new).and_return(message_builder)
end
it 'will send message' do
expect(rule.actions.pluck('action_name')).to include('send_message')
expect(message_builder).to receive(:perform)
described_class.new(rule, account, conversation).perform
end
it 'will not send message if conversation is a tweet' do
expect(rule.actions.pluck('action_name')).to include('send_message')
twitter_inbox = create(:inbox, channel: create(:channel_twitter_profile, account: account))
conversation = create(:conversation, inbox: twitter_inbox, additional_attributes: { type: 'tweet' })
expect(message_builder).not_to receive(:perform)
described_class.new(rule, account, conversation).perform
end
end
describe '#perform with send_email_to_team action' do
let!(:team) { create(:team, account: account) }
before do
rule.actions << { action_name: 'send_email_to_team', action_params: [{ team_ids: [team.id], message: 'Hello' }] }
end
it 'will send email to team' do
expect(TeamNotifications::AutomationNotificationMailer).to receive(:conversation_creation).with(conversation, team, 'Hello').and_call_original
described_class.new(rule, account, conversation).perform
end
end
# NOTE(review): send_email_transcript is exercised here but is not defined in
# the class excerpt above — presumably it lives in the parent ActionService.
describe '#perform with send_email_transcript action' do
before do
rule.actions << { action_name: 'send_email_transcript', action_params: ['[email protected], [email protected],[email protected]'] }
rule.save
end
it 'will send email to transcript to action params emails' do
mailer = double
allow(ConversationReplyMailer).to receive(:with).and_return(mailer)
allow(mailer).to receive(:conversation_transcript).with(conversation, '[email protected]')
allow(mailer).to receive(:conversation_transcript).with(conversation, '[email protected]')
allow(mailer).to receive(:conversation_transcript).with(conversation, '[email protected]')
described_class.new(rule, account, conversation).perform
expect(mailer).to have_received(:conversation_transcript).exactly(3).times
end
it 'will send email to transcript to contacts' do
rule.actions = [{ action_name: 'send_email_transcript', action_params: ['{{contact.email}}'] }]
rule.save
mailer = double
allow(ConversationReplyMailer).to receive(:with).and_return(mailer)
allow(mailer).to receive(:conversation_transcript).with(conversation, conversation.contact.email)
described_class.new(rule.reload, account, conversation).perform
expect(mailer).to have_received(:conversation_transcript).exactly(1).times
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
require 'json'
# Evaluates an automation rule's conditions against a single conversation.
# Conditions are translated into one SQL WHERE string (via the parent
# FilterService helpers); 'attribute_changed' conditions cannot be expressed
# in SQL and are collected and evaluated separately in Ruby.
class AutomationRules::ConditionsFilterService < FilterService
ATTRIBUTE_MODEL = 'contact_attribute'.freeze
def initialize(rule, conversation = nil, options = {})
super([], nil)
@rule = rule
@conversation = conversation
@account = conversation.account
# Filter metadata (attribute types, data types) loaded from a static JSON
# file relative to the app root — TODO confirm cwd is always Rails root.
file = File.read('./lib/filters/filter_keys.json')
@filters = JSON.parse(file)
@options = options
@changed_attributes = options[:changed_attributes]
end
# Returns true when the conversation matches all conditions of the rule.
def perform
@conversation_filters = @filters['conversations']
@contact_filters = @filters['contacts']
@message_filters = @filters['messages']
@attribute_changed_query_filter = []
@rule.conditions.each_with_index do |query_hash, current_index|
# attribute_changed conditions are deferred; all others extend @query_string.
@attribute_changed_query_filter << query_hash and next if query_hash['filter_operator'] == 'attribute_changed'
apply_filter(query_hash, current_index)
end
records = base_relation.where(@query_string, @filter_values.with_indifferent_access)
records = perform_attribute_changed_filter(records) if @attribute_changed_query_filter.any?
records.any?
end
# Extends the parent implementation with a 'starts_with' operator, which maps
# to a LIKE pattern anchored at the start ("value%").
def filter_operation(query_hash, current_index)
if query_hash[:filter_operator] == 'starts_with'
@filter_values["value_#{current_index}"] = "#{string_filter_values(query_hash)}%"
like_filter_string(query_hash[:filter_operator], current_index)
else
super
end
end
# Routes one condition to the query builder matching the table its attribute
# belongs to (conversation, contact, message, or custom attribute).
def apply_filter(query_hash, current_index)
conversation_filter = @conversation_filters[query_hash['attribute_key']]
contact_filter = @contact_filters[query_hash['attribute_key']]
message_filter = @message_filters[query_hash['attribute_key']]
if conversation_filter
@query_string += conversation_query_string('conversations', conversation_filter, query_hash.with_indifferent_access, current_index)
elsif contact_filter
@query_string += contact_query_string(contact_filter, query_hash.with_indifferent_access, current_index)
elsif message_filter
@query_string += message_query_string(message_filter, query_hash.with_indifferent_access, current_index)
elsif custom_attribute(query_hash['attribute_key'], @account, query_hash['custom_attribute_type'])
# send table name according to attribute key right now we are supporting contact based custom attribute filter
@query_string += custom_attribute_query(query_hash.with_indifferent_access, query_hash['custom_attribute_type'], current_index)
end
end
# If attribute_changed type filter is present perform this against array
def perform_attribute_changed_filter(records)
@attribute_changed_records = []
current_attribute_changed_record = base_relation
filter_based_on_attribute_change(records, current_attribute_changed_record)
@attribute_changed_records.uniq
end
# Loop through attribute_changed_query_filter
# NOTE(review): changed_attribute is nil when the attribute did not change,
# making changed_attribute[0] raise — presumably callers guarantee presence.
def filter_based_on_attribute_change(records, current_attribute_changed_record)
@attribute_changed_query_filter.each do |filter|
@changed_attributes = @changed_attributes.with_indifferent_access
changed_attribute = @changed_attributes[filter['attribute_key']].presence
if changed_attribute[0].in?(filter['values']['from']) && changed_attribute[1].in?(filter['values']['to'])
@attribute_changed_records = attribute_changed_filter_query(filter, records, current_attribute_changed_record)
end
current_attribute_changed_record = @attribute_changed_records
end
end
# We intersect with the record if query_operator-AND is present and union if query_operator-OR is present
def attribute_changed_filter_query(filter, records, current_attribute_changed_record)
if filter['query_operator'] == 'AND'
@attribute_changed_records + (current_attribute_changed_record & records)
else
@attribute_changed_records + (current_attribute_changed_record | records)
end
end
# Builds the SQL fragment for message-table conditions; text content is
# compared case-insensitively against the processed message content column.
def message_query_string(current_filter, query_hash, current_index)
attribute_key = query_hash['attribute_key']
query_operator = query_hash['query_operator']
attribute_key = 'processed_message_content' if attribute_key == 'content'
filter_operator_value = filter_operation(query_hash, current_index)
case current_filter['attribute_type']
when 'standard'
if current_filter['data_type'] == 'text'
" LOWER(messages.#{attribute_key}) #{filter_operator_value} #{query_operator} "
else
" messages.#{attribute_key} #{filter_operator_value} #{query_operator} "
end
end
end
# This will be used in future for contact automation rule
def contact_query_string(current_filter, query_hash, current_index)
attribute_key = query_hash['attribute_key']
query_operator = query_hash['query_operator']
filter_operator_value = filter_operation(query_hash, current_index)
case current_filter['attribute_type']
when 'additional_attributes'
" contacts.additional_attributes ->> '#{attribute_key}' #{filter_operator_value} #{query_operator} "
when 'standard'
" contacts.#{attribute_key} #{filter_operator_value} #{query_operator} "
end
end
# Builds the SQL fragment for conversation-table conditions; 'labels' is a
# special case matched via the joined tags table.
def conversation_query_string(table_name, current_filter, query_hash, current_index)
attribute_key = query_hash['attribute_key']
query_operator = query_hash['query_operator']
filter_operator_value = filter_operation(query_hash, current_index)
case current_filter['attribute_type']
when 'additional_attributes'
" #{table_name}.additional_attributes ->> '#{attribute_key}' #{filter_operator_value} #{query_operator} "
when 'standard'
if attribute_key == 'labels'
" tags.id #{filter_operator_value} #{query_operator} "
else
" #{table_name}.#{attribute_key} #{filter_operator_value} #{query_operator} "
end
end
end
private
# Scope restricted to the single conversation under evaluation, joined to its
# contact and messages; optionally narrowed to the triggering message.
def base_relation
records = Conversation.where(id: @conversation.id).joins(
'LEFT OUTER JOIN contacts on conversations.contact_id = contacts.id'
).joins(
'LEFT OUTER JOIN messages on messages.conversation_id = conversations.id'
)
records = records.where(messages: { id: @options[:message].id }) if @options[:message].present?
records
end
end
``` | require 'rails_helper'
# Specs for AutomationRules::ConditionsFilterService#perform covering the
# equal_to, starts_with/contains, and message-content condition paths.
RSpec.describe AutomationRules::ConditionsFilterService do
let(:account) { create(:account) }
let(:conversation) { create(:conversation, account: account) }
let(:email_channel) { create(:channel_email, account: account) }
let(:email_inbox) { create(:inbox, channel: email_channel, account: account) }
let(:message) do
create(:message, account: account, conversation: conversation, content: 'test text', inbox: conversation.inbox, message_type: :incoming)
end
let(:rule) { create(:automation_rule, account: account) }
before do
# Extra conversations ensure the service matches only the one under test.
conversation = create(:conversation, account: account)
conversation.contact.update(phone_number: '+918484828282', email: '[email protected]')
create(:conversation, account: account)
create(:conversation, account: account)
end
describe '#perform' do
context 'when conditions based on filter_operator equal_to' do
before do
rule.conditions = [{ 'values': ['open'], 'attribute_key': 'status', 'query_operator': nil, 'filter_operator': 'equal_to' }]
rule.save
end
context 'when conditions in rule matches with object' do
it 'will return true' do
expect(described_class.new(rule, conversation, { changed_attributes: { status: [nil, 'open'] } }).perform).to be(true)
end
end
context 'when conditions in rule does not match with object' do
it 'will return false' do
conversation.update(status: 'resolved')
expect(described_class.new(rule, conversation, { changed_attributes: { status: %w[open resolved] } }).perform).to be(false)
end
end
end
context 'when conditions based on filter_operator start_with' do
before do
contact = conversation.contact
contact.update(phone_number: '+918484848484')
# Two chained conditions: starts_with on phone_number OR contains on email.
rule.conditions = [
{ 'values': ['+918484'], 'attribute_key': 'phone_number', 'query_operator': 'OR', 'filter_operator': 'starts_with' },
{ 'values': ['test'], 'attribute_key': 'email', 'query_operator': nil, 'filter_operator': 'contains' }
]
rule.save
end
context 'when conditions in rule matches with object' do
it 'will return true' do
expect(described_class.new(rule, conversation, { changed_attributes: {} }).perform).to be(true)
end
end
context 'when conditions in rule does not match with object' do
it 'will return false' do
conversation.contact.update(phone_number: '+918585858585')
expect(described_class.new(rule, conversation, { changed_attributes: {} }).perform).to be(false)
end
end
end
context 'when conditions based on messages attributes' do
context 'when filter_operator is equal_to' do
before do
rule.conditions = [
{ 'values': ['test text'], 'attribute_key': 'content', 'query_operator': 'AND', 'filter_operator': 'equal_to' },
{ 'values': ['incoming'], 'attribute_key': 'message_type', 'query_operator': nil, 'filter_operator': 'equal_to' }
]
rule.save
end
it 'will return true when conditions matches' do
expect(described_class.new(rule, conversation, { message: message, changed_attributes: {} }).perform).to be(true)
end
it 'will return false when conditions in rule does not match' do
message.update!(message_type: :outgoing)
expect(described_class.new(rule, conversation, { message: message, changed_attributes: {} }).perform).to be(false)
end
end
context 'when filter_operator is on processed_message_content' do
before do
rule.conditions = [
{ 'values': ['help'], 'attribute_key': 'content', 'query_operator': 'AND', 'filter_operator': 'contains' },
{ 'values': ['incoming'], 'attribute_key': 'message_type', 'query_operator': nil, 'filter_operator': 'equal_to' }
]
rule.save
end
let(:conversation) { create(:conversation, account: account, inbox: email_inbox) }
# Email message whose processed (quoted) content differs from raw content.
let(:message) do
create(:message, account: account, conversation: conversation, content: "We will help you\n\n\n test",
inbox: conversation.inbox, message_type: :incoming,
content_attributes: { email: { text_content: { quoted: 'We will help you' } } })
end
it 'will return true for processed_message_content matches' do
message
expect(described_class.new(rule, conversation, { message: message, changed_attributes: {} }).perform).to be(true)
end
it 'will return false when processed_message_content does no match' do
rule.update(conditions: [{ 'values': ['text'], 'attribute_key': 'content', 'query_operator': nil, 'filter_operator': 'contains' }])
expect(described_class.new(rule, conversation, { message: message, changed_attributes: {} }).perform).to be(false)
end
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# Find the various telegram payload samples here: https://core.telegram.org/bots/webhooks#testing-your-bot-with-updates
# https://core.telegram.org/bots/api#available-types
# Syncs a Telegram "edited_message" webhook payload onto the previously
# stored Chatwoot message: looks up the contact inbox by chat id, takes the
# latest conversation, finds the message by its Telegram message id, and
# overwrites its content with the edited text.
class Telegram::UpdateMessageService
  pattr_initialize [:inbox!, :params!]

  # Performs the update. Any failure along the lookup chain (missing contact
  # inbox, no matching message, malformed params) is logged and swallowed so
  # webhook processing never raises to the caller.
  def perform
    contact_inbox = inbox.contact_inboxes.find_by!(source_id: edited_message[:chat][:id])
    conversation = contact_inbox.conversations.last
    message = conversation.messages.find_by(source_id: edited_message[:message_id])
    message.update!(content: edited_message[:text])
  rescue StandardError => e
    Rails.logger.error "Error while processing telegram message update #{e.message}"
  end

  private

  # The edited_message node of the webhook payload.
  def edited_message
    params[:edited_message]
  end
end
``` | require 'rails_helper'
# Specs for Telegram::UpdateMessageService: happy-path content update and
# the error-swallowing behavior on malformed params.
describe Telegram::UpdateMessageService do
let!(:telegram_channel) { create(:channel_telegram) }
# Representative edited_message webhook payload from Telegram.
let!(:update_params) do
{
'update_id': 2_323_484,
'edited_message': {
'message_id': 48,
'from': {
'id': 512_313_123_171_248,
'is_bot': false,
'first_name': 'Sojan',
'last_name': 'Jose',
'username': 'sojan'
},
'chat': {
'id': 517_123_213_211_248,
'first_name': 'Sojan',
'last_name': 'Jose',
'username': 'sojan',
'type': 'private'
},
'date': 1_680_088_034,
'edit_date': 1_680_088_056,
'text': 'updated message'
}
}
end
describe '#perform' do
context 'when valid update message params' do
it 'updates the appropriate message' do
# Records are keyed by the chat id / message id carried in the payload.
contact_inbox = create(:contact_inbox, inbox: telegram_channel.inbox, source_id: update_params[:edited_message][:chat][:id])
conversation = create(:conversation, contact_inbox: contact_inbox)
message = create(:message, conversation: conversation, source_id: update_params[:edited_message][:message_id])
described_class.new(inbox: telegram_channel.inbox, params: update_params.with_indifferent_access).perform
expect(message.reload.content).to eq('updated message')
end
end
context 'when invalid update message params' do
it 'will not raise errors' do
# The service rescues StandardError internally and only logs.
expect do
described_class.new(inbox: telegram_channel.inbox, params: {}).perform
end.not_to raise_error
end
end
end
end
|
Write an RSpec test file for the following Ruby class
```ruby
# Find the various telegram payload samples here: https://core.telegram.org/bots/webhooks#testing-your-bot-with-updates
# https://core.telegram.org/bots/api#available-types
# Turns a Telegram webhook payload into a Chatwoot contact, conversation and
# incoming message, including any attachments (photo/sticker/voice/audio/
# video/document) or shared location. Group chats are ignored.
class Telegram::IncomingMessageService
include ::FileTypeHelper
include ::Telegram::ParamHelpers
pattr_initialize [:inbox!, :params!]
def perform
# chatwoot doesn't support group conversations at the moment
return unless private_message?
set_contact
update_contact_avatar
set_conversation
@message = @conversation.messages.build(
content: telegram_params_message_content,
account_id: @inbox.account_id,
inbox_id: @inbox.id,
message_type: :incoming,
sender: @contact,
content_attributes: telegram_params_content_attributes,
source_id: telegram_params_message_id.to_s
)
process_message_attachments if message_params?
@message.save!
end
private
# Finds or creates the contact (and its contact_inbox) keyed by the Telegram
# sender id.
def set_contact
contact_inbox = ::ContactInboxWithContactBuilder.new(
source_id: telegram_params_from_id,
inbox: inbox,
contact_attributes: contact_attributes
).perform
@contact_inbox = contact_inbox
@contact = contact_inbox.contact
end
def process_message_attachments
attach_location
attach_files
end
# Fetches the contact's Telegram profile image asynchronously, but only once
# (skipped when an avatar is already attached).
def update_contact_avatar
return if @contact.avatar.attached?
avatar_url = inbox.channel.get_telegram_profile_image(telegram_params_from_id)
::Avatar::AvatarFromUrlJob.perform_later(@contact, avatar_url) if avatar_url
end
def conversation_params
{
account_id: @inbox.account_id,
inbox_id: @inbox.id,
contact_id: @contact.id,
contact_inbox_id: @contact_inbox.id,
additional_attributes: conversation_additional_attributes
}
end
# Reuses the first existing conversation for this contact inbox, creating a
# new one only when none exists.
def set_conversation
@conversation = @contact_inbox.conversations.first
return if @conversation
@conversation = ::Conversation.create!(conversation_params)
end
def contact_attributes
{
name: "#{telegram_params_first_name} #{telegram_params_last_name}",
additional_attributes: additional_attributes
}
end
def additional_attributes
{
username: telegram_params_username,
language_code: telegram_params_language_code
}
end
def conversation_additional_attributes
{
chat_id: telegram_params_chat_id
}
end
# Maps the payload's media node to a Chatwoot attachment file_type; falls
# back to mime-type detection for documents (via FileTypeHelper#file_type).
def file_content_type
return :image if params[:message][:photo].present? || params.dig(:message, :sticker, :thumb).present?
return :audio if params[:message][:voice].present? || params[:message][:audio].present?
return :video if params[:message][:video].present?
file_type(params[:message][:document][:mime_type])
end
# Downloads the media file from Telegram's file API and attaches it to the
# message being built.
def attach_files
return unless file
attachment_file = Down.download(
inbox.channel.get_telegram_file_path(file[:file_id])
)
@message.attachments.new(
account_id: @message.account_id,
file_type: file_content_type,
file: {
io: attachment_file,
filename: attachment_file.original_filename,
content_type: attachment_file.content_type
}
)
end
# Location shares become a :location attachment with lat/long coordinates.
def attach_location
return unless location
@message.attachments.new(
account_id: @message.account_id,
file_type: :location,
coordinates_lat: location['latitude'],
coordinates_long: location['longitude']
)
end
# First media node found in the payload, probed in priority order.
def file
@file ||= visual_media_params || params[:message][:voice].presence || params[:message][:audio].presence || params[:message][:document].presence
end
def location
@location ||= params.dig(:message, :location).presence
end
# Photos arrive as an array of sizes; the last entry is used. Stickers use
# their thumbnail.
def visual_media_params
params[:message][:photo].presence&.last || params.dig(:message, :sticker, :thumb).presence || params[:message][:video].presence
end
end
``` | require 'rails_helper'
# Specs for Telegram::IncomingMessageService covering text, caption, group
# rejection, every attachment kind (audio/image/sticker/video/voice/document),
# location shares, and callback_query payloads.
describe Telegram::IncomingMessageService do
before do
# Stub the Telegram API plus one asset URL per media fixture so the
# Down.download calls in attach_files resolve locally.
stub_request(:any, /api.telegram.org/).to_return(headers: { content_type: 'application/json' }, body: {}.to_json, status: 200)
stub_request(:get, 'https://chatwoot-assets.local/sample.png').to_return(
status: 200,
body: File.read('spec/assets/sample.png'),
headers: {}
)
stub_request(:get, 'https://chatwoot-assets.local/sample.mov').to_return(
status: 200,
body: File.read('spec/assets/sample.mov'),
headers: {}
)
stub_request(:get, 'https://chatwoot-assets.local/sample.mp3').to_return(
status: 200,
body: File.read('spec/assets/sample.mp3'),
headers: {}
)
stub_request(:get, 'https://chatwoot-assets.local/sample.ogg').to_return(
status: 200,
body: File.read('spec/assets/sample.ogg'),
headers: {}
)
stub_request(:get, 'https://chatwoot-assets.local/sample.pdf').to_return(
status: 200,
body: File.read('spec/assets/sample.pdf'),
headers: {}
)
end
let!(:telegram_channel) { create(:channel_telegram) }
# Common envelope merged into each test's 'message' payload.
let!(:message_params) do
{
'message_id' => 1,
'from' => {
'id' => 23, 'is_bot' => false, 'first_name' => 'Sojan', 'last_name' => 'Jose', 'username' => 'sojan', 'language_code' => 'en'
},
'chat' => { 'id' => 23, 'first_name' => 'Sojan', 'last_name' => 'Jose', 'username' => 'sojan', 'type' => 'private' },
'date' => 1_631_132_077
}
end
describe '#perform' do
context 'when valid text message params' do
it 'creates appropriate conversations, message and contacts' do
params = {
'update_id' => 2_342_342_343_242,
'message' => { 'text' => 'test' }.merge(message_params)
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('Sojan Jose')
expect(telegram_channel.inbox.messages.first.content).to eq('test')
end
end
context 'when valid caption params' do
it 'creates appropriate conversations, message and contacts' do
params = {
'update_id' => 2_342_342_343_242,
'message' => { 'caption' => 'test' }.merge(message_params)
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('Sojan Jose')
expect(telegram_channel.inbox.messages.first.content).to eq('test')
end
end
context 'when group messages' do
# Group chats must be ignored entirely (service returns before set_contact).
it 'doesnot create conversations, message and contacts' do
params = {
'update_id' => 2_342_342_343_242,
'message' => {
'message_id' => 1,
'from' => {
'id' => 23, 'is_bot' => false, 'first_name' => 'Sojan', 'last_name' => 'Jose', 'username' => 'sojan', 'language_code' => 'en'
},
'chat' => { 'id' => 23, 'first_name' => 'Sojan', 'last_name' => 'Jose', 'username' => 'sojan', 'type' => 'group' },
'date' => 1_631_132_077, 'text' => 'test'
}
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).to eq(0)
end
end
context 'when valid audio messages params' do
it 'creates appropriate conversations, message and contacts' do
allow(telegram_channel.inbox.channel).to receive(:get_telegram_file_path).and_return('https://chatwoot-assets.local/sample.mp3')
params = {
'update_id' => 2_342_342_343_242,
'message' => {
'audio' => {
'file_id' => 'AwADBAADbXXXXXXXXXXXGBdhD2l6_XX',
'duration' => 243,
'mime_type' => 'audio/mpeg',
'file_size' => 3_897_500,
'title' => 'Test music file'
}
}.merge(message_params)
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('Sojan Jose')
expect(telegram_channel.inbox.messages.first.attachments.first.file_type).to eq('audio')
end
end
context 'when valid image attachment params' do
it 'creates appropriate conversations, message and contacts' do
allow(telegram_channel.inbox.channel).to receive(:get_telegram_file_path).and_return('https://chatwoot-assets.local/sample.png')
params = {
'update_id' => 2_342_342_343_242,
'message' => {
'photo' => [{
'file_id' => 'AgACAgUAAxkBAAODYV3aGZlD6vhzKsE2WNmblsr6zKwAAi-tMRvCoeBWNQ1ENVBzJdwBAAMCAANzAAMhBA',
'file_unique_id' => 'AQADL60xG8Kh4FZ4', 'file_size' => 1883, 'width' => 90, 'height' => 67
}]
}.merge(message_params)
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('Sojan Jose')
expect(telegram_channel.inbox.messages.first.attachments.first.file_type).to eq('image')
end
end
context 'when valid sticker attachment params' do
# Stickers resolve through their thumb and are stored as images.
it 'creates appropriate conversations, message and contacts' do
allow(telegram_channel.inbox.channel).to receive(:get_telegram_file_path).and_return('https://chatwoot-assets.local/sample.png')
params = {
'update_id' => 2_342_342_343_242,
'message' => {
'sticker' => {
'emoji' => '👍', 'width' => 512, 'height' => 512, 'set_name' => 'a834556273_by_HopSins_1_anim', 'is_animated' => 1,
'thumb' => {
'file_id' => 'AAMCAQADGQEAA0dhXpKorj9CiRpNX3QOn7YPZ6XS4AAC4wADcVG-MexptyOf8SbfAQAHbQADIQQ',
'file_unique_id' => 'AQAD4wADcVG-MXI', 'file_size' => 4690, 'width' => 128, 'height' => 128
},
'file_id' => 'CAACAgEAAxkBAANHYV6SqK4_QokaTV90Dp-2D2el0uAAAuMAA3FRvjHsabcjn_Em3yEE',
'file_unique_id' => 'AgAD4wADcVG-MQ',
'file_size' => 7340
}
}.merge(message_params)
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('Sojan Jose')
expect(telegram_channel.inbox.messages.first.attachments.first.file_type).to eq('image')
end
end
context 'when valid video messages params' do
it 'creates appropriate conversations, message and contacts' do
allow(telegram_channel.inbox.channel).to receive(:get_telegram_file_path).and_return('https://chatwoot-assets.local/sample.mov')
params = {
'update_id' => 2_342_342_343_242,
'message' => {
'video' => {
'duration' => 1, 'width' => 720, 'height' => 1280, 'file_name' => 'IMG_2170.MOV', 'mime_type' => 'video/mp4', 'thumb' => {
'file_id' => 'AAMCBQADGQEAA4ZhXd78Xz6_c6gCzbdIkgGiXJcwwwACqwMAAp3x8Fbhf3EWamgCWAEAB20AAyEE', 'file_unique_id' => 'AQADqwMAAp3x8FZy',
'file_size' => 11_462, 'width' => 180, 'height' => 320
}, 'file_id' => 'BAACAgUAAxkBAAOGYV3e_F8-v3OoAs23SJIBolyXMMMAAqsDAAKd8fBW4X9xFmpoAlghBA', 'file_unique_id' => 'AgADqwMAAp3x8FY',
'file_size' => 291_286
}
}.merge(message_params)
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('Sojan Jose')
expect(telegram_channel.inbox.messages.first.attachments.first.file_type).to eq('video')
end
end
context 'when valid voice attachment params' do
it 'creates appropriate conversations, message and contacts' do
allow(telegram_channel.inbox.channel).to receive(:get_telegram_file_path).and_return('https://chatwoot-assets.local/sample.ogg')
params = {
'update_id' => 2_342_342_343_242,
'message' => {
'voice' => {
'duration' => 2, 'mime_type' => 'audio/ogg', 'file_id' => 'AwACAgUAAxkBAANjYVwnWF_w8LYTchqVdK9dY7mbwYEAAskDAALCoeBWFvS2u4zS6HAhBA',
'file_unique_id' => 'AgADyQMAAsKh4FY', 'file_size' => 11_833
}
}.merge(message_params)
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('Sojan Jose')
expect(telegram_channel.inbox.messages.first.attachments.first.file_type).to eq('audio')
end
end
context 'when valid document message params' do
it 'creates appropriate conversations, message and contacts' do
allow(telegram_channel.inbox.channel).to receive(:get_telegram_file_path).and_return('https://chatwoot-assets.local/sample.pdf')
params = {
'update_id' => 2_342_342_343_242,
'message' => {
'document' => {
'file_id' => 'AwADBAADbXXXXXXXXXXXGBdhD2l6_XX',
'file_name' => 'Screenshot 2021-09-27 at 2.01.14 PM.png',
'mime_type' => 'application/png',
'file_size' => 536_392
}
}.merge(message_params)
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('Sojan Jose')
expect(telegram_channel.inbox.messages.first.attachments.first.file_type).to eq('file')
end
end
context 'when valid location message params' do
it 'creates appropriate conversations, message and contacts' do
params = {
'update_id' => 2_342_342_343_242,
'message' => {
'location': {
'latitude': 37.7893768,
'longitude': -122.3895553
}
}.merge(message_params)
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('Sojan Jose')
expect(telegram_channel.inbox.messages.first.attachments.first.file_type).to eq('location')
end
end
context 'when valid callback_query params' do
# callback_query payloads (inline keyboard taps) become plain messages with
# the tapped option's data as content.
it 'creates appropriate conversations, message and contacts' do
params = {
'update_id' => 2_342_342_343_242,
'callback_query' => {
'id' => '2342342309929423',
'from' => {
'id' => 5_171_248,
'is_bot' => false,
'first_name' => 'Sojan',
'last_name' => 'Jose',
'username' => 'sojan',
'language_code' => 'en',
'is_premium' => true
},
'message' => message_params,
'chat_instance' => '-89923842384923492',
'data' => 'Option 1'
}
}.with_indifferent_access
described_class.new(inbox: telegram_channel.inbox, params: params).perform
expect(telegram_channel.inbox.conversations.count).not_to eq(0)
expect(Contact.all.first.name).to eq('Sojan Jose')
expect(telegram_channel.inbox.messages.first.content).to eq('Option 1')
end
end
end
end
|