Skip to content

Commit

Permalink
Fix and log kinesis duplicates (#50)
Browse files Browse the repository at this point in the history
* update kinesis stream to log duplicates into Sentry

* add docs to aws for kinesis stream

* update sentry to capture message

* add spec to check if it at least dedupes
  • Loading branch information
yuenmichelle1 authored Dec 19, 2023
1 parent 3774e85 commit 049e317
Show file tree
Hide file tree
Showing 2 changed files with 88 additions and 6 deletions.
82 changes: 76 additions & 6 deletions app/models/kinesis_stream.rb
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,84 @@ def initialize

def create_events(payload)
  # Parse the raw Kinesis payload into the per-type event buffers
  # (@comment_events, @classification_events, @classification_user_groups).
  receive(payload)

  # TO DO (possibly?): We may want to consider doing these upserts/inserts in batches to improve performance.
  #
  # Because ERAS is one of the ONLY receiving apps that receives from kinesis and BULK UPSERTS (feature of Rails 6+),
  # it has caught duplicates in the payload from the kinesis stream.
  # See: https://zooniverse-27.sentry.io/issues/4717869260/?project=4506117954011141&query=is%3Aunresolved&referrer=issue-stream&statsPeriod=14d&stream_index=3
  # EVEN THOUGH de-duping the payload by id before upserting should resolve issues for ERAS
  # (since ERAS only cares about counting each classification/comment once),
  # UNFORTUNATELY, there are other apps (eg. Caesar, Tove) that rely on the kinesis stream and where duplicates in the payload may affect results.
  # Since ERAS is one of the only places we can catch this error (because of how it bulk upserts), the team has decided
  # to log the error to Sentry when duplicates in the payload occur, and also log the payload itself to Sentry.
  ## Should note that this duplicate error has been seen before:
  ## SEE: https://github.com/zooniverse/zoo-stats-api-graphql/pull/128
  ## ALSO NOTING: THIS CATCH, LOG, DEDUPE AND TRY UPSERTING AGAIN TO DB SITUATION IS TEMPORARY AND ONLY USED
  ## TO SEE WHAT THE DUPES IN THE KINESIS PAYLOAD ARE.
  ## ONE MORE NOTE: per the Kinesis docs, it is VERY possible for the Kinesis stream to deliver duplicate records,
  ## and AWS's recommendation is to make record processing tolerant of (idempotent under) redelivery.
  ## SEE: https://docs.aws.amazon.com/streams/latest/dev/kinesis-record-processor-duplicates.html

  upsert_comments unless @comment_events.empty?
  upsert_classifications unless @classification_events.empty?
  upsert_classification_user_groups unless @classification_user_groups.empty?
end

# Bulk-upserts the buffered comment events.
#
# If the batch contains rows that collide on the (comment_id, event_time)
# conflict target, Postgres raises "ON CONFLICT DO UPDATE command cannot
# affect row a second time". In that case we report the error and payload to
# Sentry, de-dupe the buffer, and retry the upsert AT MOST ONCE — without the
# bound, a conflict that dedupe does not clear would retry forever.
# All other errors are reported to Sentry and swallowed so one bad batch does
# not halt stream processing.
def upsert_comments
  deduped = false
  begin
    CommentEvent.upsert_all(@comment_events, unique_by: %i[comment_id event_time])
  rescue StandardError => e
    crumb = Sentry::Breadcrumb.new(
      category: 'upsert_error_in_comments',
      message: 'Comment Events Upsert Error',
      data: {
        payload: @comment_events,
        error_message: e.message
      },
      level: 'warning'
    )
    Sentry.add_breadcrumb(crumb)
    Sentry.capture_exception(e)
    if !deduped && e.message.include?('ON CONFLICT DO UPDATE command cannot affect row a second time')
      deduped = true
      # NOTE(review): dedupes on comment_id only, which also drops rows that
      # share an id but differ in event_time — confirm that is intended.
      @comment_events = @comment_events.uniq { |comment| comment[:comment_id] }
      retry
    end
  end
end

# Bulk-upserts the buffered classification events.
#
# If the batch contains rows that collide on the (classification_id, event_time)
# conflict target, Postgres raises "ON CONFLICT DO UPDATE command cannot
# affect row a second time". In that case we report the error and payload to
# Sentry, de-dupe the buffer, and retry the upsert AT MOST ONCE — without the
# bound, a conflict that dedupe does not clear would retry forever.
# All other errors are reported to Sentry and swallowed so one bad batch does
# not halt stream processing.
def upsert_classifications
  deduped = false
  begin
    ClassificationEvent.upsert_all(@classification_events, unique_by: %i[classification_id event_time])
  rescue StandardError => e
    crumb = Sentry::Breadcrumb.new(
      category: 'upsert_error_in_classifications',
      message: 'Classification Events Upsert Error',
      data: {
        payload: @classification_events,
        error_message: e.message
      },
      level: 'warning'
    )
    Sentry.add_breadcrumb(crumb)
    Sentry.capture_exception(e)
    if !deduped && e.message.include?('ON CONFLICT DO UPDATE command cannot affect row a second time')
      deduped = true
      # NOTE(review): dedupes on classification_id only, which also drops rows
      # that share an id but differ in event_time — confirm that is intended.
      @classification_events = @classification_events.uniq { |classification| classification[:classification_id] }
      retry
    end
  end
end

# Bulk-upserts the buffered classification/user-group join events.
#
# If the batch contains rows that collide on the
# (classification_id, event_time, user_group_id, user_id) conflict target,
# Postgres raises "ON CONFLICT DO UPDATE command cannot affect row a second
# time". In that case we report the error and payload to Sentry, de-dupe the
# buffer, and retry the upsert AT MOST ONCE — without the bound, a conflict
# that dedupe does not clear would retry forever.
# All other errors are reported to Sentry and swallowed so one bad batch does
# not halt stream processing.
def upsert_classification_user_groups
  deduped = false
  begin
    ClassificationUserGroup.upsert_all(@classification_user_groups.flatten, unique_by: %i[classification_id event_time user_group_id user_id])
  rescue StandardError => e
    crumb = Sentry::Breadcrumb.new(
      category: 'upsert_error_in_classifications_user_groups',
      message: 'Classification User Groups Upsert Error',
      data: {
        payload: @classification_user_groups,
        error_message: e.message
      },
      level: 'warning'
    )
    Sentry.add_breadcrumb(crumb)
    Sentry.capture_exception(e)
    if !deduped && e.message.include?('ON CONFLICT DO UPDATE command cannot affect row a second time')
      deduped = true
      # NOTE(review): dedupe key is (classification_id, user_group_id) but the
      # DB conflict target also includes event_time and user_id — this can drop
      # rows for distinct users in the same group; confirm that is intended.
      @classification_user_groups = @classification_user_groups.uniq { |cug| [cug[:classification_id], cug[:user_group_id]] }
      retry
    end
  end
end

def receive(payload)
Expand Down
12 changes: 12 additions & 0 deletions spec/models/kinesis_stream_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -71,5 +71,17 @@
end.to change(ClassificationUserGroup, :count).from(0).to(1)
end
end

# Regression coverage for duplicate records in a single Kinesis batch:
# Kinesis may deliver the same record more than once, so the model layer
# must end up with one buffered event per unique id after create_events.
context 'payload has duplicates' do
  it 'dedupes classification_events' do
    # Two identical classification records in one batch -> one buffered event.
    kinesis_stream.create_events([classification_payload, classification_payload])
    expect(kinesis_stream.instance_variable_get(:@classification_events).length).to eq(1)
  end

  it 'dedupes comment_events' do
    # Three identical comment records in one batch -> one buffered event.
    kinesis_stream.create_events([comment_payload, comment_payload, comment_payload])
    expect(kinesis_stream.instance_variable_get(:@comment_events).length).to eq(1)
  end
end
end
end

0 comments on commit 049e317

Please sign in to comment.