Fix pagination with unread filter
fixes VICE-3170 flag = react_discussion Test Plan: Solution: - any entries marked as read after selecting the unread filter, will still return under the unread filter, until the filter changes or a page refresh. - note the mutation will still update and the blue unread icon will disappear. Have at least 60 unread entries (3 pages). Have some unread entries. 1. Use the unread filter. 2. Notice the unread entries can update to read, however. If you switch pages the same set of total unread entries will be returned. 3. Upon page refresh or switching filter, then returning to unread the entries will no longer return. Change-Id: I911f85ed8a23b7f8bdfa22edce5745e79b389307 Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/304016 Tested-by: Service Cloud Jenkins <svc.cloudjenkins@instructure.com> Reviewed-by: Omar Soto-Fortuño <omar.soto@instructure.com> Product-Review: Omar Soto-Fortuño <omar.soto@instructure.com> QA-Review: Caleb Guanzon <cguanzon@instructure.com> Migration-Review: Jeremy Stanley <jeremy@instructure.com>
This commit is contained in:
parent
1f2f837a55
commit
a2314df6cc
|
@ -19,7 +19,7 @@
|
|||
#
|
||||
|
||||
class Loaders::DiscussionEntryLoader < GraphQL::Batch::Loader
|
||||
def initialize(current_user:, search_term: nil, sort_order: :desc, filter: nil, root_entries: false, relative_entry_id: nil, before_relative_entry: true, include_relative_entry: true, user_search_id: nil)
|
||||
def initialize(current_user:, search_term: nil, sort_order: :desc, filter: nil, root_entries: false, relative_entry_id: nil, before_relative_entry: true, include_relative_entry: true, user_search_id: nil, unread_before: nil)
|
||||
super()
|
||||
@current_user = current_user
|
||||
@search_term = search_term
|
||||
|
@ -30,6 +30,7 @@ class Loaders::DiscussionEntryLoader < GraphQL::Batch::Loader
|
|||
@relative_entry_id = relative_entry_id
|
||||
@before_entry = before_relative_entry
|
||||
@include_entry = include_relative_entry
|
||||
@unread_before = unread_before
|
||||
end
|
||||
|
||||
def perform(objects)
|
||||
|
@ -87,7 +88,7 @@ class Loaders::DiscussionEntryLoader < GraphQL::Batch::Loader
|
|||
end
|
||||
|
||||
# unread filter is used like search results and need to exclude deleted entries
|
||||
scope = scope.active.unread_for_user(@current_user) if @filter == "unread"
|
||||
scope = scope.active.unread_for_user_before(@current_user, @unread_before) if @filter == "unread"
|
||||
scope = scope.where(workflow_state: "deleted") if @filter == "deleted"
|
||||
scope = scope.where(user_id: @user_search_id) unless @user_search_id.nil?
|
||||
scope = scope.preload(:user, :editor)
|
||||
|
|
|
@ -114,6 +114,7 @@ module Types
|
|||
argument :sort_order, Types::DiscussionSortOrderType, required: false
|
||||
argument :root_entries, Boolean, required: false
|
||||
argument :user_search_id, String, required: false
|
||||
argument :unread_before, String, required: false
|
||||
end
|
||||
def discussion_entries_connection(**args)
|
||||
get_entries(args)
|
||||
|
@ -276,6 +277,7 @@ module Types
|
|||
argument :filter, Types::DiscussionFilterType, required: false
|
||||
argument :sort_order, Types::DiscussionSortOrderType, required: false
|
||||
argument :root_entries, Boolean, required: false
|
||||
argument :unread_before, String, required: false
|
||||
end
|
||||
def entries_total_pages(**args)
|
||||
get_entry_page_count(args)
|
||||
|
@ -319,7 +321,7 @@ module Types
|
|||
).load(object)
|
||||
end
|
||||
|
||||
def get_entries(search_term: nil, filter: nil, sort_order: :asc, root_entries: false, user_search_id: nil)
|
||||
def get_entries(search_term: nil, filter: nil, sort_order: :asc, root_entries: false, user_search_id: nil, unread_before: nil)
|
||||
return [] if object.initial_post_required?(current_user, session) || !available_for_user
|
||||
|
||||
Loaders::DiscussionEntryLoader.for(
|
||||
|
@ -328,7 +330,8 @@ module Types
|
|||
filter: filter,
|
||||
sort_order: sort_order,
|
||||
root_entries: root_entries,
|
||||
user_search_id: user_search_id
|
||||
user_search_id: user_search_id,
|
||||
unread_before: unread_before
|
||||
).load(object)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -373,6 +373,7 @@ class DiscussionEntry < ActiveRecord::Base
|
|||
scope :newest_first, -> { order("discussion_entries.created_at DESC, discussion_entries.id DESC") }
|
||||
# when there is no discussion_entry_participant for a user, it is considered unread
|
||||
scope :unread_for_user, ->(user) { joins(participant_join_sql(user)).where(discussion_entry_participants: { workflow_state: ["unread", nil] }) }
|
||||
scope :unread_for_user_before, ->(user, unread_before = 1.minute.ago.utc) { where(discussion_entry_participants: { workflow_state: ["unread", nil] }).or(where("discussion_entry_participants.workflow_state = 'read' AND COALESCE(discussion_entry_participants.read_at, '2022-10-28') >= ?", unread_before)).joins(participant_join_sql(user)) }
|
||||
|
||||
def self.participant_join_sql(current_user)
|
||||
sanitize_sql(["LEFT OUTER JOIN #{DiscussionEntryParticipant.quoted_table_name} ON discussion_entries.id = discussion_entry_participants.discussion_entry_id
|
||||
|
|
|
@ -81,7 +81,7 @@ class DiscussionEntryParticipant < ActiveRecord::Base
|
|||
return not_null_column_object(column: :entry, entry: entry_or_topic, user: user) unless entry_or_topic
|
||||
return not_null_column_object(column: :user, entry: entry_or_topic, user: user) unless user
|
||||
|
||||
insert_columns = %w[discussion_entry_id user_id root_account_id workflow_state]
|
||||
insert_columns = %w[discussion_entry_id user_id root_account_id workflow_state read_at]
|
||||
update_columns = []
|
||||
update_values = []
|
||||
|
||||
|
@ -128,6 +128,10 @@ class DiscussionEntryParticipant < ActiveRecord::Base
|
|||
if new_state
|
||||
update_columns << "workflow_state"
|
||||
update_values << connection.quote(new_state)
|
||||
|
||||
read_at_datetime = (new_state&.to_s == "read") ? Time.now : nil
|
||||
update_columns << "read_at"
|
||||
update_values << connection.quote(read_at_datetime)
|
||||
end
|
||||
|
||||
# if there are no values in the update_columns, there is no point to
|
||||
|
@ -162,11 +166,13 @@ class DiscussionEntryParticipant < ActiveRecord::Base
|
|||
end
|
||||
|
||||
def self.row_values(batch_entry, user_id, root_account_id, default_state, update_values)
|
||||
read_at_datetime = (default_state&.to_s == "read") ? Time.now : nil
|
||||
[
|
||||
connection.quote(batch_entry.is_a?(ActiveRecord::Base) ? batch_entry.id_for_database : batch_entry),
|
||||
connection.quote(user_id),
|
||||
connection.quote(root_account_id),
|
||||
connection.quote(default_state),
|
||||
connection.quote(read_at_datetime),
|
||||
] + update_values
|
||||
end
|
||||
|
||||
|
|
|
@ -0,0 +1,25 @@
|
|||
# frozen_string_literal: true
|
||||
|
||||
#
|
||||
# Copyright (C) 2022 - present Instructure, Inc.
|
||||
#
|
||||
# This file is part of Canvas.
|
||||
#
|
||||
# Canvas is free software: you can redistribute it and/or modify it under
|
||||
# the terms of the GNU Affero General Public License as published by the Free
|
||||
# Software Foundation, version 3 of the License.
|
||||
#
|
||||
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
|
||||
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
|
||||
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
|
||||
# details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License along
|
||||
# with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
class AddReadAtToDiscussionEntryParticipant < ActiveRecord::Migration[6.1]
|
||||
tag :predeploy
|
||||
|
||||
def change
|
||||
add_column :discussion_entry_participants, :read_at, :datetime, default: nil
|
||||
end
|
||||
end
|
|
@ -260,6 +260,25 @@ describe Loaders::DiscussionEntryLoader do
|
|||
end
|
||||
end
|
||||
|
||||
context "search term" do
|
||||
it "by unread workflow state" do
|
||||
# explicit and implicit read states
|
||||
@de1.change_read_state("read", @teacher)
|
||||
@de2.change_read_state("unread", @teacher)
|
||||
@de4.discussion_entry_participants.where(user_id: @teacher).delete_all
|
||||
|
||||
GraphQL::Batch.batch do
|
||||
Loaders::DiscussionEntryLoader.for(
|
||||
current_user: @teacher,
|
||||
filter: "unread",
|
||||
search_term: "touch"
|
||||
).load(@discussion).then do |discussion_entries|
|
||||
expect(discussion_entries).to match_array([@de2])
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
it "by deleted workflow state" do
|
||||
GraphQL::Batch.batch do
|
||||
Loaders::DiscussionEntryLoader.for(
|
||||
|
|
|
@ -69,6 +69,41 @@ describe DiscussionEntryParticipant do
|
|||
end
|
||||
end
|
||||
|
||||
context "workflow_state.changed to read" do
|
||||
it "set read_at to Time.now" do
|
||||
Timecop.safe_mode = false
|
||||
Timecop.freeze
|
||||
# Make a new user a discussion entry participant to the old entry, so they will default unread.
|
||||
student_2 = student_in_course(active_all: true).user
|
||||
@entry.change_read_state("read", student_2)
|
||||
|
||||
participant_2 = @entry.find_existing_participant(student_2)
|
||||
expect(participant_2.read_at).to be_within(10.seconds).of Time.now.utc
|
||||
ensure
|
||||
Timecop.return
|
||||
Timecop.safe_mode = true
|
||||
end
|
||||
end
|
||||
|
||||
context "workflow_state.changed to not read" do
|
||||
it "set read_at to nil" do
|
||||
Timecop.safe_mode = false
|
||||
Timecop.freeze
|
||||
student_2 = student_in_course(active_all: true).user
|
||||
@entry.change_read_state("read", student_2)
|
||||
|
||||
participant_2 = @entry.find_existing_participant(student_2)
|
||||
expect(participant_2.read_at).to be_within(10.seconds).of Time.now.utc
|
||||
|
||||
@entry.change_read_state("unread", student_2)
|
||||
participant_2.reload
|
||||
expect(participant_2.read_at).to be(nil)
|
||||
ensure
|
||||
Timecop.return
|
||||
Timecop.safe_mode = true
|
||||
end
|
||||
end
|
||||
|
||||
it "returns early if there is no entry" do
|
||||
expect(DiscussionEntryParticipant.upsert_for_entries(nil, double)).to be_nil
|
||||
end
|
||||
|
|
|
@ -437,6 +437,40 @@ describe DiscussionEntry do
|
|||
expect(@topic_updated_at.to_i).not_to eq @topic.reload.updated_at.to_i
|
||||
end
|
||||
|
||||
describe ".unread_for_user_before(user, read_at)" do
|
||||
before(:once) do
|
||||
course_with_teacher(active_all: true)
|
||||
student_in_course(active_all: true)
|
||||
@topic = @course.discussion_topics.create!(title: "title", message: "message", user: @teacher)
|
||||
end
|
||||
|
||||
it "returns all unread entries for user or entries unread before the given time" do
|
||||
# it returns entries read after the given time, but
|
||||
# it should be interpreted as, entries that were unread before the given time.
|
||||
|
||||
# scenario: you visit a page for unread entries;
|
||||
# as you scroll the page you read entries at (later times, then you go to next page and entries are now read AFTER your initial QUERY time.
|
||||
|
||||
Timecop.safe_mode = false
|
||||
Timecop.freeze(Time.utc(2013, 3, 13, 9, 12))
|
||||
@entry1 = @topic.discussion_entries.create!(message: "entry 1 outside", user: @teacher)
|
||||
@entry1.change_read_state("read", @student)
|
||||
|
||||
Timecop.freeze(Time.utc(2013, 3, 13, 10, 12))
|
||||
@entry2 = @topic.discussion_entries.create!(message: "entry 2", user: @teacher)
|
||||
@entry3 = @topic.discussion_entries.create!(message: "entry 3", user: @teacher)
|
||||
|
||||
@entry2.change_read_state("read", @student)
|
||||
|
||||
# Notice we read the entries 1 min after the the query issues
|
||||
expect(DiscussionEntry.unread_for_user_before(@student, Time.utc(2013, 3, 13, 10, 11)).order("id").map(&:message)).to eq(["entry 2", "entry 3"])
|
||||
|
||||
ensure
|
||||
Timecop.return
|
||||
Timecop.safe_mode = true
|
||||
end
|
||||
end
|
||||
|
||||
context "read/unread state" do
|
||||
before(:once) do
|
||||
course_with_teacher(active_all: true)
|
||||
|
|
|
@ -47,6 +47,7 @@ export const getDiscussionQueryMock = ({
|
|||
sort = 'desc',
|
||||
shouldError = false,
|
||||
isGroup = true,
|
||||
unreadBefore = '',
|
||||
} = {}) => [
|
||||
{
|
||||
request: {
|
||||
|
@ -61,6 +62,7 @@ export const getDiscussionQueryMock = ({
|
|||
rootEntries,
|
||||
searchTerm,
|
||||
sort,
|
||||
unreadBefore,
|
||||
},
|
||||
},
|
||||
result: {
|
||||
|
@ -129,6 +131,7 @@ export const getAnonymousDiscussionQueryMock = ({
|
|||
searchTerm = '',
|
||||
sort = 'desc',
|
||||
shouldError = false,
|
||||
unreadBefore = '',
|
||||
} = {}) => [
|
||||
{
|
||||
request: {
|
||||
|
@ -143,6 +146,7 @@ export const getAnonymousDiscussionQueryMock = ({
|
|||
rootEntries,
|
||||
searchTerm,
|
||||
sort,
|
||||
unreadBefore,
|
||||
},
|
||||
},
|
||||
result: {
|
||||
|
|
|
@ -39,6 +39,7 @@ export const DISCUSSION_QUERY = gql`
|
|||
$sort: DiscussionSortOrderType
|
||||
$courseID: String
|
||||
$rolePillTypes: [String!] = ["TaEnrollment", "TeacherEnrollment", "DesignerEnrollment"]
|
||||
$unreadBefore: String
|
||||
) {
|
||||
legacyNode(_id: $discussionID, type: Discussion) {
|
||||
... on Discussion {
|
||||
|
@ -62,6 +63,7 @@ export const DISCUSSION_QUERY = gql`
|
|||
filter: $filter
|
||||
sortOrder: $sort
|
||||
userSearchId: $userSearchId
|
||||
unreadBefore: $unreadBefore
|
||||
) {
|
||||
nodes {
|
||||
...DiscussionEntry
|
||||
|
@ -94,6 +96,7 @@ export const DISCUSSION_QUERY = gql`
|
|||
rootEntries: $rootEntries
|
||||
filter: $filter
|
||||
searchTerm: $searchTerm
|
||||
unreadBefore: $unreadBefore
|
||||
)
|
||||
searchEntryCount(filter: $filter, searchTerm: $searchTerm)
|
||||
groupSet {
|
||||
|
|
|
@ -43,6 +43,7 @@ const I18n = useI18nScope('discussion_topics_post')
|
|||
const DiscussionTopicManager = props => {
|
||||
const [searchTerm, setSearchTerm] = useState('')
|
||||
const [filter, setFilter] = useState('all')
|
||||
const [unreadBefore, setUnreadBefore] = useState('')
|
||||
const [sort, setSort] = useState('desc')
|
||||
const [pageNumber, setPageNumber] = useState(ENV.current_page)
|
||||
const [searchPageNumber, setSearchPageNumber] = useState(0)
|
||||
|
@ -87,6 +88,16 @@ const DiscussionTopicManager = props => {
|
|||
setReplyFromId,
|
||||
}
|
||||
|
||||
// Unread filter
|
||||
// This introduces a double query for DISCUSSION_QUERY when filter changes
|
||||
useEffect(() => {
|
||||
if (filter === 'unread' && !unreadBefore) {
|
||||
setUnreadBefore(new Date(Date.now()).toISOString())
|
||||
} else if (filter !== 'unread') {
|
||||
setUnreadBefore('')
|
||||
}
|
||||
}, [filter, unreadBefore])
|
||||
|
||||
// Reset search to 0 when inactive
|
||||
useEffect(() => {
|
||||
if (searchTerm && pageNumber !== 0) {
|
||||
|
@ -144,13 +155,21 @@ const DiscussionTopicManager = props => {
|
|||
filter,
|
||||
sort,
|
||||
courseID: window.ENV?.course_id,
|
||||
unreadBefore,
|
||||
}
|
||||
|
||||
// Unread and unreadBefore both useState and trigger useQuery. We need to wait until both are set.
|
||||
// Also when switching from unread the same issue causes 2 queries when unreadBefore switches to '',
|
||||
// but unreadBefore only applies to unread filter. So we dont need the extra query.
|
||||
const waitForUnreadFilter =
|
||||
(filter === 'unread' && !unreadBefore) || (filter !== 'unread' && unreadBefore)
|
||||
|
||||
// in some cases, we want to refresh the results rather that use the current cache:
|
||||
// in the case: 'isUserMissingInitialPost' the cache is empty so we need to get the entries.
|
||||
const discussionTopicQuery = useQuery(DISCUSSION_QUERY, {
|
||||
variables,
|
||||
fetchPolicy: isUserMissingInitialPost || searchTerm ? 'network-only' : 'cache-and-network',
|
||||
skip: waitForUnreadFilter,
|
||||
})
|
||||
|
||||
const updateDraftCache = (cache, result) => {
|
||||
|
@ -246,7 +265,16 @@ const DiscussionTopicManager = props => {
|
|||
},
|
||||
})
|
||||
|
||||
if (discussionTopicQuery.loading && !searchTerm && filter === 'all') {
|
||||
// why || waitForUnreadFilter: when waitForUnreadFilter, discussionTopicQuery is skipped, but this does not set loading.
|
||||
// why && !searchTerm: this is for the search if you type it triggers useQuery and you lose the search.
|
||||
// why not just discussionTopicQuery.loading, it doesnt play nice with search term.
|
||||
if (
|
||||
(discussionTopicQuery.loading && !searchTerm) ||
|
||||
waitForUnreadFilter ||
|
||||
(discussionTopicQuery.loading &&
|
||||
discussionTopicQuery?.data &&
|
||||
Object.keys(discussionTopicQuery.data).length === 0)
|
||||
) {
|
||||
return <LoadingIndicator />
|
||||
}
|
||||
|
||||
|
|
|
@ -183,7 +183,16 @@ describe('DiscussionFullPage', () => {
|
|||
})
|
||||
|
||||
it('updates discussion entry', async () => {
|
||||
const mocks = [...getDiscussionQueryMock(), ...updateDiscussionEntryMock()]
|
||||
const unreadTime = new Date('2019-05-14T11:01:58.135Z').toISOString()
|
||||
jest
|
||||
.spyOn(global.Date, 'now')
|
||||
.mockImplementation(() => new Date('2019-05-14T11:01:58.135Z').valueOf())
|
||||
|
||||
const mocks = [
|
||||
...getDiscussionQueryMock({unreadBefore: unreadTime}),
|
||||
...getDiscussionQueryMock(),
|
||||
...updateDiscussionEntryMock(),
|
||||
]
|
||||
const container = setup(mocks)
|
||||
expect(await container.findByText('This is the parent reply')).toBeInTheDocument()
|
||||
|
||||
|
@ -208,9 +217,14 @@ describe('DiscussionFullPage', () => {
|
|||
|
||||
describe('searchFilter', () => {
|
||||
it('filters by unread', async () => {
|
||||
const unreadTime = new Date('2019-05-14T11:01:58.135Z').toISOString()
|
||||
jest
|
||||
.spyOn(global.Date, 'now')
|
||||
.mockImplementation(() => new Date('2019-05-14T11:01:58.135Z').valueOf())
|
||||
|
||||
const mocks = [
|
||||
...getDiscussionQueryMock(),
|
||||
...getDiscussionQueryMock({filter: 'unread', rootEntries: false}),
|
||||
...getDiscussionQueryMock({filter: 'unread', rootEntries: false, unreadBefore: unreadTime}),
|
||||
]
|
||||
const container = setup(mocks)
|
||||
expect(await container.findByText('This is a Discussion Topic Message')).toBeInTheDocument()
|
||||
|
@ -373,7 +387,16 @@ describe('DiscussionFullPage', () => {
|
|||
// For some reason when we add a reply to a discussion topic we end up performing
|
||||
// 2 additional discussion queries. Until we address that issue we need to specify
|
||||
// these queries in our mocks we provide to MockedProvider
|
||||
const mocks = [...getDiscussionQueryMock(), ...createDiscussionEntryMock()]
|
||||
|
||||
const unreadTime = new Date('2019-05-14T11:01:58.135Z').toISOString()
|
||||
jest
|
||||
.spyOn(global.Date, 'now')
|
||||
.mockImplementation(() => new Date('2019-05-14T11:01:58.135Z').valueOf())
|
||||
const mocks = [
|
||||
...getDiscussionQueryMock({unreadBefore: ''}),
|
||||
...getDiscussionQueryMock({unreadBefore: unreadTime}),
|
||||
...createDiscussionEntryMock(),
|
||||
]
|
||||
const container = setup(mocks)
|
||||
|
||||
const replyButton = await container.findByTestId('discussion-topic-reply')
|
||||
|
|
Loading…
Reference in New Issue