Skip to content

Commit

Permalink
"All Articles" list (#2224)
Browse files Browse the repository at this point in the history
Lists Fact-Checks as well as Explainers under one list.

Checklist:

- [x] There is already an `articles` field under `TeamType`: We need to make sure that the `article_type` argument is not mandatory anymore.
- [x] Implement a `filtered_articles` method for the `Team` model, that uses SQL `UNION` and reuse the logic for explainers and fact-checks implemented by methods `filtered_explainers` and `filtered_fact_checks`, respectively.
  - [x] Guard against SQL injections.
  - [x] Test for all filters.
  - [x] Test for all sorting options.
  - [x] Avoid N + 1 queries problem.

Reference: CV2-5007.
  • Loading branch information
caiosba authored Feb 20, 2025
1 parent ae4b6b4 commit 875f0e4
Show file tree
Hide file tree
Showing 7 changed files with 131 additions and 27 deletions.
2 changes: 1 addition & 1 deletion .rubocop.yml
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ Metrics/CyclomaticComplexity:
A complexity metric that is strongly correlated to the number
of test cases needed to validate a method.
Enabled: true
Max: 13
Max: 14

Metrics/LineLength:
Description: 'Limit lines to 80 characters.'
Expand Down
20 changes: 13 additions & 7 deletions app/graph/types/team_type.rb
Original file line number Diff line number Diff line change
Expand Up @@ -304,9 +304,10 @@ def tipline_requests(from_timestamp:, to_timestamp:)
end

field :articles, ::ArticleUnion.connection_type, null: true do
argument :article_type, GraphQL::Types::String, required: true, camelize: false
argument :article_type, GraphQL::Types::String, required: false, camelize: false

# Sort and pagination
argument :limit, GraphQL::Types::Int, required: false, default_value: 10
argument :offset, GraphQL::Types::Int, required: false, default_value: 0
argument :sort, GraphQL::Types::String, required: false, default_value: 'title'
argument :sort_type, GraphQL::Types::String, required: false, camelize: false, default_value: 'ASC'
Expand All @@ -331,13 +332,18 @@ def articles(**args)
sort = args[:sort].to_s
order = [:title, :language, :updated_at, :id].include?(sort.downcase.to_sym) ? sort.downcase.to_sym : :title
order_type = args[:sort_type].to_s.downcase.to_sym == :desc ? :desc : :asc
articles = Explainer.none
if args[:article_type] == 'explainer'
articles = object.filtered_explainers(args)
elsif args[:article_type] == 'fact-check'
articles = object.filtered_fact_checks(args)
if args[:article_type].blank?
limit = context[:current_arguments][:first] || args[:limit]
object.filtered_articles(args, limit.to_i, args[:offset].to_i, order, order_type)
else
articles = nil
if args[:article_type] == 'explainer'
articles = object.filtered_explainers(args)
elsif args[:article_type] == 'fact-check'
articles = object.filtered_fact_checks(args)
end
articles.offset(args[:offset].to_i).order(order => order_type)
end
articles.offset(args[:offset].to_i).order(order => order_type)
end

field :articles_count, GraphQL::Types::Int, null: true do
Expand Down
36 changes: 31 additions & 5 deletions app/models/team.rb
Original file line number Diff line number Diff line change
Expand Up @@ -487,6 +487,31 @@ def available_newsletter_header_types
available
end

# Returns this team's fact-checks and explainers combined into a single
# sorted, paginated list ("All Articles").
#
# filters    - Hash of filters shared with filtered_fact_checks / filtered_explainers
# limit      - Integer page size
# offset     - Integer page offset
# order      - column name to sort by (whitelisted below; falls back to created_at)
# order_type - 'ASC' or 'DESC' (whitelisted below; falls back to DESC)
#
# Returns an Array of FactCheck and Explainer objects in SQL order.
def filtered_articles(filters = {}, limit = 10, offset = 0, order = 'created_at', order_type = 'DESC')
  # We must include all columns we may need to sort by
  columns = [:id, :title, :language, :created_at, :updated_at]

  # Guard against SQL injection: `order` and `order_type` are interpolated into
  # raw SQL below, so they must be restricted to known-safe values here (not
  # only at the GraphQL layer, since this is a public model method)
  order = columns.include?(order.to_s.downcase.to_sym) ? order.to_s.downcase : 'created_at'
  order_type = order_type.to_s.casecmp('asc').zero? ? 'ASC' : 'DESC'

  # Re-use existing methods to build the SQL queries for fact-checks and for explainers
  fact_checks = self.filtered_fact_checks(filters, false).select(["'FactCheck' AS type"] + columns.collect{ |column| "fact_checks.#{column}" })
  explainers = self.filtered_explainers(filters).select(["'Explainer' AS type"] + columns.collect{ |column| "explainers.#{column}" })

  # Combine the two queries using SQL UNION; limit and offset are bound safely
  # through sanitize_sql placeholders
  query = <<~SQL
    SELECT type, id, title, language, created_at, updated_at FROM ( #{fact_checks.to_sql} UNION #{explainers.to_sql} ) AS articles
    ORDER BY #{order} #{order_type} LIMIT ? OFFSET ?
  SQL
  results = ActiveRecord::Base.connection.exec_query(ActiveRecord::Base.sanitize_sql([query, limit, offset])).map{ |row| OpenStruct.new(row) }

  # Pre-load objects in memory in order to avoid an N + 1 queries problem
  preloaded_results = {}
  fact_check_ids = results.select{ |result| result.type == 'FactCheck' }.map(&:id)
  preloaded_results['FactCheck'] = FactCheck.includes(:claim_description).where(id: fact_check_ids).where('claim_descriptions.team_id' => self.id).to_a.to_h{ |object| [object[:id], object] }
  explainer_ids = results.select{ |result| result.type == 'Explainer' }.map(&:id)
  preloaded_results['Explainer'] = Explainer.where(id: explainer_ids, team_id: self.id).to_a.to_h{ |object| [object[:id], object] }

  # Load full objects from the pre-loaded lists while keeping the order
  # returned by the UNION query; `compact` drops any row whose record is no
  # longer visible to this team, instead of leaking a nil into the result
  results.collect{ |object| preloaded_results[object.type][object.id] }.compact
end

def filtered_explainers(filters = {})
query = self.explainers

Expand All @@ -513,8 +538,9 @@ def filtered_explainers(filters = {})
query
end

def filtered_fact_checks(filters = {})
query = FactCheck.includes(:claim_description).where('claim_descriptions.team_id' => self.id)
def filtered_fact_checks(filters = {}, include_claim_descriptions = true)
query = (include_claim_descriptions ? FactCheck.includes(:claim_description) : FactCheck.joins(:claim_description))
query = query.where('claim_descriptions.team_id' => self.id)

# Filter by standalone
query = query.left_joins(claim_description: { project_media: :media }).where('claim_descriptions.project_media_id IS NULL OR medias.type = ?', 'Blank') if filters[:standalone]
Expand Down Expand Up @@ -560,9 +586,9 @@ def filtered_fact_checks(filters = {})
def filter_by_keywords(query, filters, type = 'FactCheck')
tsquery = Team.sanitize_sql_array(["websearch_to_tsquery(?)", filters[:text]])
if type == 'FactCheck'
tsvector = "to_tsvector('simple', coalesce(title, '') || ' ' || coalesce(summary, '') || ' ' || coalesce(url, '') || ' ' || coalesce(claim_descriptions.description, '') || ' ' || coalesce(claim_descriptions.context, ''))"
else
tsvector = "to_tsvector('simple', coalesce(title, '') || ' ' || coalesce(description, '') || ' ' || coalesce(url, ''))"
tsvector = "to_tsvector('simple', coalesce(fact_checks.title, '') || ' ' || coalesce(fact_checks.summary, '') || ' ' || coalesce(fact_checks.url, '') || ' ' || coalesce(claim_descriptions.description, '') || ' ' || coalesce(claim_descriptions.context, ''))"
elsif type == 'Explainer'
tsvector = "to_tsvector('simple', coalesce(explainers.title, '') || ' ' || coalesce(explainers.description, '') || ' ' || coalesce(explainers.url, ''))"
end
query.where(Arel.sql("#{tsvector} @@ #{tsquery}"))
end
Expand Down
3 changes: 2 additions & 1 deletion lib/relay.idl
Original file line number Diff line number Diff line change
Expand Up @@ -13183,7 +13183,7 @@ type Team implements Node {
Returns the elements in the list that come after the specified cursor.
"""
after: String
article_type: String!
article_type: String

"""
Returns the elements in the list that come before the specified cursor.
Expand All @@ -13202,6 +13202,7 @@ type Team implements Node {
Returns the last _n_ elements from the list.
"""
last: Int
limit: Int = 10
offset: Int = 0
publisher_ids: [Int]
rating: [String]
Expand Down
22 changes: 15 additions & 7 deletions public/relay.json
Original file line number Diff line number Diff line change
Expand Up @@ -69221,18 +69221,26 @@
"name": "article_type",
"description": null,
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null,
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "limit",
"description": null,
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"defaultValue": "10",
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "offset",
"description": null,
Expand Down
16 changes: 10 additions & 6 deletions test/controllers/graphql_controller_12_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -407,18 +407,22 @@ def teardown
assert_equal [fc.id], data.collect{ |edge| edge['node']['dbid'] }
end

test "should get team articles (all)" do
test "should get all team articles" do
Sidekiq::Testing.fake!
authenticate_with_user(@u)
pm = create_project_media team: @t
cd = create_claim_description project_media: pm
create_fact_check claim_description: cd, tags: ['foo', 'bar']
create_explainer team: @t
query = "query { team(slug: \"#{@t.slug}\") { articles_count } }"
create_fact_check claim_description: cd, title: 'Foo'
sleep 1
create_explainer team: @t, title: 'Test'
sleep 1
create_explainer team: @t, title: 'Bar'
query = "query { team(slug: \"#{@t.slug}\") { articles(first: 2, sort: \"title\", sort_type: \"asc\") { edges { node { ... on Explainer { title }, ... on FactCheck { title } } } }, articles_count } }"
post :create, params: { query: query, team: @t.slug }
assert_response :success
team = JSON.parse(@response.body)['data']['team']
assert_equal 2, team['articles_count']
data = JSON.parse(@response.body)['data']
assert_equal 3, data.dig('team', 'articles_count')
assert_equal ['Bar', 'Foo'], data.dig('team', 'articles', 'edges').collect{ |node| node.dig('node', 'title') }
end

test "should create api key" do
Expand Down
59 changes: 59 additions & 0 deletions test/models/team_2_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -1591,6 +1591,65 @@ def setup
assert_equal [], t.search_for_similar_articles('Test')
end

test "should return all articles" do
Sidekiq::Testing.fake!
t = create_team
t.set_languages ['en', 'es']
t.save!
u = create_user
create_team_user team: t, user: u, role: 'admin'
pm = create_project_media team: t
fc = create_fact_check title: 'Foo Test', user: u, publisher_id: u.id, tags: ['test'], claim_description: create_claim_description(project_media: create_project_media(team: t, media: Blank.create!)), language: 'en'
fc.updated_at = Time.now + 1
fc.save!
sleep 1
ex = create_explainer team: t, title: 'Bar Test', user: u, tags: ['test'], language: 'es'
ex.updated_at = Time.now + 1
ex.save!
create_explainer team: t
create_explainer team: t
create_explainer

# Ensure we test for all filters
filters = {
user_ids: [u.id],
tags: ['test'],
language: ['en', 'es'],
created_at: { 'start_time' => Time.now.yesterday.to_datetime.to_s, 'end_time' => Time.now.tomorrow.to_datetime.to_s }.to_json,
updated_at: { 'start_time' => Time.now.yesterday.to_datetime.to_s, 'end_time' => Time.now.tomorrow.to_datetime.to_s }.to_json,
text: 'Test',
standalone: true,
publisher_ids: [u.id],
report_status: ['unpublished'],
rating: ['undetermined'],
imported: false,
target_id: pm.id,
trashed: false
}
assert_equal 2, t.filtered_articles(filters).count

# Ensure we test pagination
assert_equal ['Foo Test'], t.filtered_articles(filters, 1, 0, 'created_at', 'ASC').map(&:title)
assert_equal ['Bar Test'], t.filtered_articles(filters, 1, 1, 'created_at', 'ASC').map(&:title)

# Ensure we test for all sorting options
# Sort by updated date
assert_equal ['Foo Test'], t.filtered_articles(filters, 1, 0, 'updated_at', 'ASC').map(&:title)
assert_equal ['Bar Test'], t.filtered_articles(filters, 1, 0, 'updated_at', 'DESC').map(&:title)
# Sort by language
assert_equal ['Foo Test'], t.filtered_articles(filters, 1, 0, 'language', 'ASC').map(&:title)
assert_equal ['Bar Test'], t.filtered_articles(filters, 1, 0, 'language', 'DESC').map(&:title)
# Sort by title
assert_equal ['Bar Test'], t.filtered_articles(filters, 1, 0, 'title', 'ASC').map(&:title)
assert_equal ['Foo Test'], t.filtered_articles(filters, 1, 0, 'title', 'DESC').map(&:title)

# Ensure we don't have a N + 1 queries problem
# 4 queries: 1 for the target item, 1 for the articles, 1 for all fact-checks and 1 for all explainers
assert_queries 4, '=' do
t.filtered_articles
end
end

test "should return platforms for which statistics are available" do
t = create_team
pm = create_project_media team: t
Expand Down

0 comments on commit 875f0e4

Please sign in to comment.