diff --git a/app/models/concerns/labware_creators/donor_pooling_calculator.rb b/app/models/concerns/labware_creators/donor_pooling_calculator.rb new file mode 100644 index 000000000..6856db987 --- /dev/null +++ b/app/models/concerns/labware_creators/donor_pooling_calculator.rb @@ -0,0 +1,162 @@ +# frozen_string_literal: true + +# This module contains algorithms to allocate source wells into a target number of pools. +module LabwareCreators::DonorPoolingCalculator + extend ActiveSupport::Concern + + # Splits wells into groups by study and project. Wells are grouped based on the + # study and project of the first aliquot in each well (only one aliquot is + # expected per well). Returns an array of groups, where each group is an array + # of wells with the same study and project. + # + # If the input group is [w1, w2, w3, w4, w5, w6, w7, w8, w9] + # where w1, w2, w3, w4, w5, w6, w7, w8, and w9 are wells with (study_id, project_id), + # + # w1(1,1) + # w2(1,2) + # w3(1,3) + # w4(1,1) + # w5(1,2) + # w6(1,3) + # w7(1,1) + # w8(2,1) + # w9(2,2) + # + # the result will be: + # [[w1, w4, w7], [w2, w5], [w3, w6], [w8], [w9]] + # + # @param group [Array] The group of wells to be split. + # @return [Array>] An array of well groups. + def split_single_group_by_study_and_project(group) + group.group_by { |well| [well.aliquots.first.study.id, well.aliquots.first.project.id] }.values + end + + # Splits groups ensuring unique donor_ids within each group. Iterates over + # each group, creating subgroups with wells from a unique donor. The first + # occurrences of unique donor_ids are grouped, then the second occurrences, + # and so on. This prevents combining samples with the same donor_id. The + # result is flattened to a single array of subgroups. 
+ # + # If the input groups are [[w1, w2, w3, w4], [w5, w6, w7], [w8, w9]] + # where w1, w2, w3, w4, w5, w6, w7, w8, and w9 are wells with (donor_id), + # + # w1(1) + # w2(2) + # w3(3) + # w4(1) + # w5(4) + # w6(4) + # w7(5) + # w8(6) + # w9(7) + # + # the result will be: + # [[w1, w2, w3], [w4], [w5, w7], [w6], [w8, w9]] + # + # Note that the input groups are not mixed. donor_ids are unique within each + # result subgroup. + # + # @param groups [Array>] Array of well groups to be split. + # @return [Array>] Array of subgroups split by donor ID. + def split_groups_by_unique_donor_ids(groups) + groups.flat_map { |group| split_single_group_by_unique_donor_ids(group) } + end + + # Splits a single group of wells by donor_ids. This method is used by the + # 'split_groups_by_unique_donor_ids' method. It iteratively segregates wells with + # the first encountered instance of each unique donor_id into a separate + # subgroup. This process continues until there are no wells left in the + # original group. The result is a collection of subgroups, each containing + # wells from distinct donors. + # + # If the input group is [w1, w2, w3, w4, w5, w6, w7, w8, w9] + # where w1, w2, w3, w4, w5, w6, w7, w8, and w9 are wells with (donor_id), + # + # w1(1) + # w2(2) + # w3(3) + # w4(1) + # w5(2) + # w6(4) + # w7(5) + # w8(5) + # w9(5) + # + # the result will be: + # [[w1, w2, w3, w6, w7], [w4, w5, w8], [w9]] + # + # @param group [Array] The group of wells to split. + # @return [Array>] An array of subgroups, each containing wells + # from different donors. 
+ def split_single_group_by_unique_donor_ids(group) + group = group.dup + output = [] + wells_moved = 0 + wells_total = group.size + while wells_moved < wells_total + subgroup = [] + unique_donor_ids(group).each do |donor_id| + wells_moved += 1 + index = group.index { |well| well.aliquots.first.sample.sample_metadata.donor_id == donor_id } + subgroup << group.delete_at(index) + end + output << subgroup + end + output + end + + # Returns the unique donor_ids from a group of wells. Used by the + # 'split_single_group_by_unique_donor_ids' method. + # + # If the input group is [w1, w2, w3, w4, w5, w6, w7, w8, w9] + # where w1, w2, w3, w4, w5, w6, w7, w8, and w9 are wells with (donor_id), + # + # w1(1) + # w2(2) + # w3(3) + # w4(1) + # w5(2) + # w6(4) + # w7(5) + # w8(5) + # w9(5) + # + # the result will be: + # [1, 2, 3, 4, 5] + # + # @param group [Array] The group of wells from which to retrieve donor_ids. + # @return [Array] An array of unique donor_ids. + def unique_donor_ids(group) + group.map { |well| well.aliquots.first.sample.sample_metadata.donor_id }.uniq + end + + # Distributes samples across pools based on group sizes. It sorts the groups + # by size and splits the largest group into two until the number of groups + # equals the number of pools or until all groups have a size of 1. The input + # groups are the result of applying conditions, hence they cannot be mixed. + # + # If the request number of pools is 6 and the input groups are + # [[1, 2, 3], [4, 5], [6, 7, 8, 9]] where the numbers denote wells, + # + # the result will be: + # [[3], [1], [2], [4, 5], [6, 7], [8, 9]] + # + # for which the steps are: + # [[1, 2, 3], [4, 5], [6, 7, 8, 9]] -> 3 pools (input) + # [[4, 5], [6, 7], [8, 9], [1, 2, 3]] -> 4 pools + # [[3], [4, 5], [6, 7], [8, 9], [1, 2]] -> 5 pools + # [[3], [1], [2], [4, 5], [6, 7], [8, 9]] -> 6 pools (output) + # + # @param groups [Array>] Array of well groups to be distributed. + # @return [Array>] Array of distributed groups. 
+ def distribute_groups_across_pools(groups, number_of_pools) + groups = groups.dup + groups.sort_by!(&:size) + while groups.any? && groups.last.size > 1 && groups.size < number_of_pools + largest = groups.pop # last + splits = largest.each_slice((largest.size / 2.0).ceil).to_a + groups.concat(splits).sort_by!(&:size) + end + groups + end +end diff --git a/app/models/concerns/labware_creators/donor_pooling_validator.rb b/app/models/concerns/labware_creators/donor_pooling_validator.rb new file mode 100644 index 000000000..131ac350f --- /dev/null +++ b/app/models/concerns/labware_creators/donor_pooling_validator.rb @@ -0,0 +1,130 @@ +# frozen_string_literal: true + +# This module contains validations for donor pooling. +module LabwareCreators::DonorPoolingValidator + extend ActiveSupport::Concern + + included do + validate :source_barcodes_must_be_entered + validate :source_barcodes_must_be_different + validate :source_plates_must_exist + validate :wells_with_aliquots_must_have_donor_id + validate :number_of_pools_must_not_exceed_configured + end + + SOURCE_BARCODES_MUST_BE_ENTERED = 'At least one source plate must be scanned.' + + SOURCE_BARCODES_MUST_BE_DIFFERENT = 'You must not scan the same barcode more than once.' + + SOURCE_PLATES_MUST_EXIST = + 'Source plates not found: %s. ' \ + 'Please check you scanned the correct source plates. ' + + NUMBER_OF_POOLS_MUST_NOT_EXCEED_CONFIGURED = + 'The calculated number of pools (%s) is higher than the number of pools ' \ + '(%s) configured. This is due to constraints such as: ' \ + '* samples with different Studies or Projects cannot be combined ' \ + '* multiple samples from the same donor cannot be combined. ' \ + 'Please check you have scanned the correct set of source plates.' + + WELLS_WITH_ALIQUOTS_MUST_HAVE_DONOR_ID = + 'All samples must have the donor_id specified. ' \ + 'Wells missing donor_id (on sample metadata): %s' + + # Validates that at least one source barcode has been entered. 
If no barcodes + # are entered, an error is added to the :source_barcodes attribute. + # + # @return [void] + def source_barcodes_must_be_entered + return if minimal_barcodes.size >= 1 + + errors.add(:source_barcodes, SOURCE_BARCODES_MUST_BE_ENTERED) + end + + # Validates that all source barcodes are unique. If any barcodes are + # duplicated, an error is added to the :source_barcodes attribute. + # + # @return [void] + def source_barcodes_must_be_different + return if minimal_barcodes.size == minimal_barcodes.uniq.size + + errors.add(:source_barcodes, SOURCE_BARCODES_MUST_BE_DIFFERENT) + end + + # Validates that all source plates corresponding to the minimal barcodes exist. + # If the number of source plates does not match the number of minimal barcodes, + # an error is added to the :source_plates attribute. + # + # @return [void] + def source_plates_must_exist + return if source_plates.size == minimal_barcodes.size + + formatted_string = (minimal_barcodes - source_plates.map(&:human_barcode)).join(', ') + + errors.add(:source_plates, format(SOURCE_PLATES_MUST_EXIST, formatted_string)) + end + + # Validates that the number of calculated pools does not exceed the configured + # number of pools. If the number of calculated pools is greater, an error is + # added to the :source_plates attribute. + # + # @return [void] + def number_of_pools_must_not_exceed_configured + # Don't add this error if there are already errors about missing donor_ids. + invalid_wells_hash = locations_with_missing_donor_id + return if invalid_wells_hash.any? + + return if pools.size <= number_of_pools + + errors.add(:source_plates, format(NUMBER_OF_POOLS_MUST_NOT_EXCEED_CONFIGURED, pools.size, number_of_pools)) + end + + # Validates that all wells with aliquots must have a donor_id. + # It uses the locations_with_missing_donor_id method to find any wells that are + # missing a donor_id. 
If any such wells are found, it adds an error message to + # the source_plates attribute, formatted with the barcodes of the plates and + # the wells that are missing a donor_id. + # + # @return [void] + def wells_with_aliquots_must_have_donor_id + invalid_wells_hash = locations_with_missing_donor_id + return if invalid_wells_hash.empty? + + formatted_string = invalid_wells_hash.map { |barcode, locations| "#{barcode}: #{locations.join(', ')}" }.join(' ') + errors.add(:source_plates, format(WELLS_WITH_ALIQUOTS_MUST_HAVE_DONOR_ID, formatted_string)) + end + + private + + # Checks each source well for pooling for missing donor_id. Returns a hash + # with keys as the barcodes of source plates and values as arrays of well + # locations with missing donor_id. If a plate has no wells with missing + # donor_id, it is not included in the returned hash. This method is used by + # the wells_with_aliquots_must_have_donor_id method to generate an error + # message. + # + # @return [Hash] A hash mapping source plate barcodes to arrays of invalid + # well locations. + def locations_with_missing_donor_id + # source_wells_for_pooling contains filtered wells from source plates + invalid_wells = source_wells_for_pooling.select { |well| missing_donor_id?(well) } + invalid_wells.each_with_object({}) do |well, hash| + plate_barcode = source_wells_to_plates[well].human_barcode # find the plate barcode + hash[plate_barcode] ||= [] + hash[plate_barcode] << well.location + end + end + + # Checks if a well is missing a donor_id. If there is an aliquot, it checks + # if the associated sample_metadata has a donor_id. If the donor_id is + # missing, it returns true. Otherwise, it returns false. + # + # @param well [Well] The well to check. + # @return [Boolean] True if the well is missing a donor_id, false otherwise. + def missing_donor_id?(well) + aliquot = well.aliquots&.first + return false unless aliquot + + (aliquot.sample.sample_metadata.donor_id || '').to_s.strip.blank? 
+ end +end diff --git a/app/models/labware_creators/donor_pooling_plate.rb b/app/models/labware_creators/donor_pooling_plate.rb new file mode 100644 index 000000000..3b5489689 --- /dev/null +++ b/app/models/labware_creators/donor_pooling_plate.rb @@ -0,0 +1,221 @@ +# frozen_string_literal: true + +module LabwareCreators + # This labware creator receives barcodes for a configured number of source + # plates from the user. It pools samples from the passed wells into a + # destination plate. It's used for scRNA Donor Pooling to create 'LRC PBMC + # Pools' plates from 'LRC PBMC Defrost PBS' plates. + # + # The creator imposes restrictions: + # - It doesn't allow combining samples from different studies or projects. + # - It doesn't allow samples with the same donor_id in the same pool. + # + # The number of pools is determined by a lookup table based on sample count. + # Tag depth index is added to aliquot attributes to avoid tag clashes. + class DonorPoolingPlate < Base + include LabwareCreators::CustomPage + include SupportParent::PlateOnly + + include LabwareCreators::DonorPoolingCalculator + include LabwareCreators::DonorPoolingValidator + + # The name of the template that will be used for rendering the barcode + # input page. + self.page = 'donor_pooling_plate' + + # Add the barcodes attribute to the list of attributes for this class. + # It is used by the creation controller to permit the barcodes parameter. + self.attributes += [{ barcodes: [] }] + + # @!attribute [r] barcodes + # @return [Array] an array of barcode strings from the user + attr_reader :barcodes + + # @!attribute [r] minimal_barcodes + # @return [Array] a version of barcodes where any blank values + # have been removed and remaining values have been stripped of leading + # and trailing whitespace + attr_reader :minimal_barcodes + + # Define related objects to be included when retrieving source plates using + # the Sequencescape::API::V2.Plate.find_all method. 
The 'includes' argument + # of the method is expected to be an array of strings. + SOURCE_PLATE_INCLUDES = %w[ + purpose + wells.aliquots.study + wells.aliquots.project + wells.aliquots.request + wells.aliquots.sample.sample_metadata + wells.requests_as_source + ].freeze + + # The default number of pools to be created if the count is not found in + # the lookup table. + # + # @return [Integer] The default number of pools. + def default_number_of_pools + purpose_config.dig(:creator_class, :args, :default_number_of_pools) + end + + # Returns the number of source plates from the purpose configuration. + # + # @return [Integer] The number of source plates. + def max_number_of_source_plates + @max_number_of_source_plates ||= purpose_config.dig(:creator_class, :args, :max_number_of_source_plates) + end + + # Returns the WellFilter instance associated with this creator. The filter + # uses the callback method 'labware_wells' to get the list of wells to + # filter, which specifies wells in 'passed' state from the source plates. + # The 'source_wells_for_pooling' method is used to get the filtered wells. + # + # @return [WellFilter] The WellFilter instance. + def well_filter + @well_filter ||= WellFilter.new(creator: self) + end + + # Returns all passed wells from the source plates in column order. + # + # @return [Array] An array of passed wells. + def labware_wells + source_plates.flat_map { |plate| plate.wells_in_columns.select(&:passed?) } + end + + # Returns all source plates associated with the minimal barcodes. + # + # @return [Array] An array of source plates. + def source_plates + @source_plates ||= + Sequencescape::Api::V2::Plate.find_all({ barcode: minimal_barcodes }, includes: SOURCE_PLATE_INCLUDES) + end + + # Returns the source wells for pooling. The wells are filtered using the + # well_filter. + # + # @return [Array] An array of source wells for pooling. + def source_wells_for_pooling + well_filter.filtered.map(&:first) # The first element is the well. 
+ end + + # Returns a hash mapping each source well to its source plate. The hash + # contains all source wells independent of the filtering. + # + # @return [Hash] A hash where the keys are wells and the values are the plates + # that each well belongs to. + def source_wells_to_plates + @source_wells_to_plates ||= + source_plates.each_with_object({}) { |plate, hash| plate.wells.each { |well| hash[well] = plate } } + end + + # Returns the pools for the destination plate. + # + # @return [Array] An array of pools. + def pools + @pools ||= build_pools + end + + # Sets the barcodes and minimal_barcodes instance variables. The + # minimal_barcodes are derived from the barcodes by removing any blank + # values and stripping whitespace from the remaining values. + # + # @param barcodes [Array] An array of barcodes. + def barcodes=(barcodes) + @barcodes = barcodes + @minimal_barcodes = barcodes.compact_blank.map(&:strip) + end + + # Returns the number of pools based on the sample count from the lookup + # table. + # + # @return [Integer] The number of pools. + def number_of_pools + id = purpose_config.dig(:creator_class, :args, :pooling) + Settings.poolings[id][:number_of_pools][source_wells_for_pooling.count] || default_number_of_pools + end + + # Creates transfer requests from source wells to the destination plate in + # Sequencescape. + # + # @param dest_uuid [String] The UUID of the destination plate. + # @return [Boolean] Returns true if no exception is raised. + def transfer_material_from_parent!(dest_uuid) + dest_plate = Sequencescape::Api::V2::Plate.find_by(uuid: dest_uuid) + api.transfer_request_collection.create!( + user: user_uuid, + transfer_requests: transfer_request_attributes(dest_plate) + ) + true + end + + # Generates the attributes for transfer requests from the source wells to the + # destination plate. + # + # @param dest_plate [Sequencescape::Api::V2::Plate] The destination plate. 
+ # @return [Array] An array of hashes, each representing the attributes + # for a transfer request. + def transfer_request_attributes(dest_plate) + well_filter.filtered.filter_map do |source_well, additional_parameters| + request_hash(source_well, dest_plate, additional_parameters) + end + end + + # Generates a hash representing a transfer request from a source well to a + # destination well. Additional parameters generated by the well filter are + # merged into the request hash, i.e.'outer_request' and 'submission_id'. + # tag_depth is added to the aliquot attributes. + # + # @param source_well [Sequencescape::Api::V2::Well] The source well. + # @param dest_plate [Sequencescape::Api::V2::Plate] The destination plate. + # @param additional_parameters [Hash] Additional parameters to include. + # @return [Hash] A hash representing a transfer request. + def request_hash(source_well, dest_plate, additional_parameters) + dest_location = transfer_hash[source_well][:dest_locn] + { + 'source_asset' => source_well.uuid, + 'target_asset' => dest_plate.well_at_location(dest_location)&.uuid, + :aliquot_attributes => { + 'tag_depth' => tag_depth_hash[source_well] + } + }.merge(additional_parameters) + end + + # Returns a mapping between each source well to a destination location. + # + # @return [Hash] A hash where each key is a source well and each value is a + # hash with a single key-value pair: { dest_locn: destination_location }. + def transfer_hash + @transfer_hash ||= + pools + .each_with_index + .with_object({}) do |(pool, index), result| + dest_location = WellHelpers.well_at_column_index(index) # column order, 96 wells + pool.each { |source_well| result[source_well] = { dest_locn: dest_location } } + end + end + + # Returns a hash mapping each source well to its index in its pool plus one. + # The tag depth is used as an aliquot attribute in the transfer request. It + # is recorded in Sequencescape to avoid tag clashes. 
+ # + # @return [Hash] A hash where keys are wells and values are tag depths. + def tag_depth_hash + @tag_depth_hash ||= + pools + .each_with_index + .with_object({}) do |(pool, _pool_index), hash| + pool.each_with_index { |well, index| hash[well] = (index + 1).to_s } + end + end + + # Builds the pools for the destination plate. The wells are first grouped + # by study and project, then split by donor_ids, and finally distributed + # across pools. + # + # @return [Array>] An array of well groups distributed across pools. + def build_pools + groups = split_single_group_by_study_and_project(source_wells_for_pooling) + groups = split_groups_by_unique_donor_ids(groups) + distribute_groups_across_pools(groups, number_of_pools) + end + end +end diff --git a/app/sequencescape/sequencescape/api/v2/plate.rb b/app/sequencescape/sequencescape/api/v2/plate.rb index bd0cc947a..791106cc8 100644 --- a/app/sequencescape/sequencescape/api/v2/plate.rb +++ b/app/sequencescape/sequencescape/api/v2/plate.rb @@ -72,6 +72,15 @@ def wells_in_columns @wells_in_columns ||= wells.sort_by(&:coordinate) end + # Returns the well at a specified location. + # + # @param well_location [String] The location to find the well at. + # @return [Well, nil] The well at the specified location, or `nil` if no + # well is found at that location. + def well_at_location(well_location) + wells.detect { |well| well.location == well_location } + end + def tagged? wells.any?(&:tagged?) end diff --git a/app/views/plate_creation/donor_pooling_plate.html.erb b/app/views/plate_creation/donor_pooling_plate.html.erb new file mode 100644 index 000000000..20880ff09 --- /dev/null +++ b/app/views/plate_creation/donor_pooling_plate.html.erb @@ -0,0 +1,50 @@ +<%= page('donor-pooling-page') do %> + <%= content do %> + <%= card id: 'main-content' do %> +
+

Instructions

+

This step allows multiple source plates to be pooled together into a destination plate.

+

This process checks if pooling is possible under the following conditions:

+
    +
  • All source wells with aliquots must have donor IDs.
  • +
  • Combining samples from different studies or projects in the same pool is not allowed.
  • +
  • Including samples with the same donor ID in the same pool is not allowed.
  • +
  • The number of pools must not exceed the number configured for the samples.
  • +
+

Scan the source plate barcodes into the panel on the right of the screen (the order doesn't matter) and click Create Plate.

+
+ <% end %> + <% end %> + <%= sidebar do %> + <%= card title: 'Add plates to pool', without_block: true do %> +
+
+
+

Scan in up to <%= @labware_creator.max_number_of_source_plates %> plates.

+
+
+ <% end %> +
+ <%= form_for( + @labware_creator, + url: limber_plate_children_path(@labware_creator.parent_uuid), + as: :plate, + html: { method: :post }) do |form| %> + + <%= form.hidden_field :parent_uuid %> + <%= form.hidden_field :purpose_uuid %> + <% @labware_creator.max_number_of_source_plates.times do |index| %> +
+ + +
+ <% end %> + <%= form.submit 'Create Plate', class: 'btn btn-success btn-lg', id: 'create-labware' %> +
+ <% end %> +<% end %> \ No newline at end of file diff --git a/config/poolings/interim_scrna_core_donor_pooling.yml b/config/poolings/interim_scrna_core_donor_pooling.yml new file mode 100644 index 000000000..5c9a015fe --- /dev/null +++ b/config/poolings/interim_scrna_core_donor_pooling.yml @@ -0,0 +1,102 @@ +--- +# A copy of cardinal pooling is used until a new configuration is provided. +# The maxiumum number of samples is 96 in this file. +interim_scrna_core_donor_pooling: + # Map number of samples to number of pools. + number_of_pools: + 96: 8 + 95: 8 + 94: 8 + 93: 8 + 92: 8 + 91: 8 + 90: 8 + 89: 8 + 88: 8 + 87: 7 + 86: 7 + 85: 7 + 84: 7 + 83: 7 + 82: 7 + 81: 7 + 80: 7 + 79: 7 + 78: 7 + 77: 7 + 76: 6 + 75: 6 + 74: 6 + 73: 6 + 72: 6 + 71: 6 + 70: 6 + 69: 6 + 68: 6 + 67: 6 + 66: 6 + 65: 5 + 64: 5 + 63: 5 + 62: 5 + 61: 5 + 60: 5 + 59: 5 + 58: 5 + 57: 5 + 56: 5 + 55: 5 + 54: 5 + 53: 5 + 52: 4 + 51: 4 + 50: 4 + 49: 4 + 48: 4 + 47: 4 + 46: 4 + 45: 4 + 44: 4 + 43: 4 + 42: 4 + 41: 4 + 40: 4 + 39: 3 + 38: 3 + 37: 3 + 36: 3 + 35: 3 + 34: 3 + 33: 3 + 32: 3 + 31: 3 + 30: 3 + 29: 3 + 28: 3 + 27: 3 + 26: 2 + 25: 2 + 24: 2 + 23: 2 + 22: 2 + 21: 2 + 20: 1 + 19: 1 + 18: 1 + 17: 1 + 16: 1 + 15: 1 + 14: 1 + 13: 1 + 12: 1 + 11: 1 + 10: 1 + 9: 1 + 8: 1 + 7: 1 + 6: 1 + 5: 1 + 4: 1 + 3: 1 + 2: 1 + 1: 1 diff --git a/config/purposes/scrna_core_cdna_prep.wip.yml b/config/purposes/scrna_core_cdna_prep.wip.yml index 4936423fe..0fdb63ae5 100644 --- a/config/purposes/scrna_core_cdna_prep.wip.yml +++ b/config/purposes/scrna_core_cdna_prep.wip.yml @@ -47,6 +47,24 @@ LRC PBMC Defrost PBS: # This plate has come from the LRC PBMC Defrost PBS plate, in SeqOps. LRC PBMC Pools: :asset_type: plate + :creator_class: + name: LabwareCreators::DonorPoolingPlate + args: + # If the the lookup table in pooling configuration does not specify the + # number of pools for a given number of samples, this value will be used. 
+ default_number_of_pools: 16 + # The maximum number of source plates that can be used to create the pool. + # This is also used for bed verification of the source plates. + max_number_of_source_plates: 2 + # The name of the pooling configuration to use. This configuration + # contains the lookup table mapping the number of samples to the number + # of pools. The pooling configurations are stored in the config/poolings/ + # directory. The top level key in the configuration file is the name of the + # pooling configuration. A copy of cardinal pooling is used until a new + # configuration is provided. + pooling: interim_scrna_core_donor_pooling + :state_changer_class: StateChangers::AutomaticPlateStateChanger + :work_completion_request_type: 'limber_scrna_core_donor_pooling' :stock_plate: false :input_plate: false # Plate containing pooled PBMCs from different donors. diff --git a/docs/creators.md b/docs/creators.md index 9e2582df1..1f0de5af1 100644 --- a/docs/creators.md +++ b/docs/creators.md @@ -61,6 +61,16 @@ Labware creators are responsible for creating new labware from a parent labware. {LabwareCreators::CustomTaggedPlate View class documentation} +## LabwareCreators::DonorPoolingPlate + +{include:LabwareCreators::DonorPoolingPlate} + + Used directly in 1 purposes: + LRC PBMC Pools + +{LabwareCreators::DonorPoolingPlate View class documentation} + + ## LabwareCreators::FinalTube {include:LabwareCreators::FinalTube} diff --git a/docs/state_changers.md b/docs/state_changers.md index c03ddbeaf..8235d005c 100644 --- a/docs/state_changers.md +++ b/docs/state_changers.md @@ -18,8 +18,8 @@ manual transfer. 
{include:StateChangers::DefaultStateChanger} - Used directly in 185 purposes: - CLCM DNA End Prep, CLCM DNA Lib PCR, CLCM DNA Lib PCR XP, CLCM Lysate DNA, CLCM Lysate RNA, CLCM RNA End Prep, CLCM RNA Lib PCR, CLCM RNA Lib PCR XP, CLCM RT PreAmp, CLCM Stock, GBS PCR1, GBS PCR2, GBS Stock, GBS-96 Stock, GnT MDA Norm, GnT Pico End Prep, GnT Pico Lib PCR, GnT Pico-XP, GnT scDNA, GnT Stock, Heron Lysed Tube Rack, LB Cap Lib, LB Cap Lib PCR, LB Cap Lib PCR-XP, LB Cap Lib Pool, LB cDNA, LB cDNA XP, LB Cherrypick, LB End Prep, LB Hyb, LB Lib PCR, LB Lib PCR-XP, LB Lib PrePool, LB Post Shear, LB Shear, LBB Cherrypick, LBB Chromium Tagged, LBB Enriched BCR, LBB Enriched BCR HT, LBB Enriched TCR, LBB Enriched TCR HT, LBB Lib PCR-XP, LBB Lib-XP, LBB Ligation, LBB Ligation Tagged, LBC 3pV3 GEX Dil, LBC 3pV3 GEX Frag 2XP, LBC 3pV3 GEX LigXP, LBC 3pV3 GEX PCR 2XP, LBC 5p GEX Dil, LBC 5p GEX Frag 2XP, LBC 5p GEX LigXP, LBC 5p GEX PCR 2XP, LBC Aggregate, LBC BCR Dil 1, LBC BCR Dil 2, LBC BCR Enrich1 2XSPRI, LBC BCR Enrich2 2XSPRI, LBC BCR Post Lig 1XSPRI, LBC BCR Post PCR, LBC Stock, LBC TCR Dil 1, LBC TCR Dil 2, LBC TCR Enrich1 2XSPRI, LBC TCR Enrich2 2XSPRI, LBC TCR Post Lig 1XSPRI, LBC TCR Post PCR, LBR Cherrypick, LBR Frag, LBR Frag cDNA, LBR Globin, LBR Globin DNase, LBR mRNA Cap, LBR Ribo DNase, LBR RiboGlobin DNase, LBSN-384 PCR 1, LBSN-384 PCR 2, LCA 10X cDNA, LCA Blood Array, LCA Blood Bank, LCA Connect PCRXP, LCA PBMC, LCA PBMC Bank, LCA PBMC Pools, LCMB Cherrypick, LCMB End Prep, LCMB Lib PCR, LCMB Lib PCR-XP, LDS AL Lib, LDS AL Lib Dil, LDS Cherrypick, LDS Lib PCR, LDS Lib PCR XP, LDS Stock, LDS Stock XP, LDW-96 Stock, LHR End Prep, LHR Lib PCR, LHR PCR 1, LHR PCR 2, LHR RT, LHR XP, LHR-384 AL Lib, LHR-384 cDNA, LHR-384 End Prep, LHR-384 Lib PCR, LHR-384 PCR 1, LHR-384 PCR 2, LHR-384 RT, LHR-384 XP, LILYS-96 Stock, LRC Blood Bank, LRC HT 5p cDNA PCR, LRC HT 5p cDNA PCR XP, LRC HT 5p Chip, LRC HT 5p GEMs, LRC PBMC Bank, LRC PBMC Cryostor, LRC PBMC Defrost PBS, LRC PBMC 
Pools, LRC PBMC Pools Input, LTHR Cherrypick, LTHR Lib PCR 1, LTHR Lib PCR 2, LTHR Lib PCR pool, LTHR PCR 1, LTHR PCR 2, LTHR RT, LTHR RT-S, LTHR-384 Lib PCR 1, LTHR-384 Lib PCR 2, LTHR-384 Lib PCR pool, LTHR-384 PCR 1, LTHR-384 PCR 2, LTHR-384 RT, LTHR-384 RT-Q, LTN AL Lib, LTN AL Lib Dil, LTN Cherrypick, LTN Lib PCR, LTN Lib PCR XP, LTN Post Shear, LTN Shear, LTN Stock, LTN Stock XP, PF Cherrypicked, PF End Prep, PF Lib, PF Lib Q-XP2, PF Lib XP, PF Lib XP2, PF Post Shear, PF Post Shear XP, PF Shear, PF-384 End Prep, PF-384 Lib, PF-384 Lib XP2, PF-384 Post Shear XP, pWGS-384 AL Lib, pWGS-384 End Prep, pWGS-384 Lib PCR, pWGS-384 Post Shear XP, RVI Cap Lib, RVI Cap Lib PCR, RVI Cap Lib PCR XP, RVI Cap Lib Pool, RVI cDNA XP, RVI Cherrypick, RVI Hyb, RVI Lib PCR, RVI Lib PCR XP, RVI Lib PrePool, RVI Lig Bind, RVI RT, scRNA cDNA-XP, scRNA End Prep, scRNA Lib PCR, scRNA Stock, scRNA-384 cDNA-XP, scRNA-384 End Prep, scRNA-384 Lib PCR, scRNA-384 Stock, Tag Plate - 384, TR Stock 48, and TR Stock 96 + Used directly in 184 purposes: + CLCM DNA End Prep, CLCM DNA Lib PCR, CLCM DNA Lib PCR XP, CLCM Lysate DNA, CLCM Lysate RNA, CLCM RNA End Prep, CLCM RNA Lib PCR, CLCM RNA Lib PCR XP, CLCM RT PreAmp, CLCM Stock, GBS PCR1, GBS PCR2, GBS Stock, GBS-96 Stock, GnT MDA Norm, GnT Pico End Prep, GnT Pico Lib PCR, GnT Pico-XP, GnT scDNA, GnT Stock, Heron Lysed Tube Rack, LB Cap Lib, LB Cap Lib PCR, LB Cap Lib PCR-XP, LB Cap Lib Pool, LB cDNA, LB cDNA XP, LB Cherrypick, LB End Prep, LB Hyb, LB Lib PCR, LB Lib PCR-XP, LB Lib PrePool, LB Post Shear, LB Shear, LBB Cherrypick, LBB Chromium Tagged, LBB Enriched BCR, LBB Enriched BCR HT, LBB Enriched TCR, LBB Enriched TCR HT, LBB Lib PCR-XP, LBB Lib-XP, LBB Ligation, LBB Ligation Tagged, LBC 3pV3 GEX Dil, LBC 3pV3 GEX Frag 2XP, LBC 3pV3 GEX LigXP, LBC 3pV3 GEX PCR 2XP, LBC 5p GEX Dil, LBC 5p GEX Frag 2XP, LBC 5p GEX LigXP, LBC 5p GEX PCR 2XP, LBC Aggregate, LBC BCR Dil 1, LBC BCR Dil 2, LBC BCR Enrich1 2XSPRI, LBC BCR Enrich2 2XSPRI, LBC BCR 
Post Lig 1XSPRI, LBC BCR Post PCR, LBC Stock, LBC TCR Dil 1, LBC TCR Dil 2, LBC TCR Enrich1 2XSPRI, LBC TCR Enrich2 2XSPRI, LBC TCR Post Lig 1XSPRI, LBC TCR Post PCR, LBR Cherrypick, LBR Frag, LBR Frag cDNA, LBR Globin, LBR Globin DNase, LBR mRNA Cap, LBR Ribo DNase, LBR RiboGlobin DNase, LBSN-384 PCR 1, LBSN-384 PCR 2, LCA 10X cDNA, LCA Blood Array, LCA Blood Bank, LCA Connect PCRXP, LCA PBMC, LCA PBMC Bank, LCA PBMC Pools, LCMB Cherrypick, LCMB End Prep, LCMB Lib PCR, LCMB Lib PCR-XP, LDS AL Lib, LDS AL Lib Dil, LDS Cherrypick, LDS Lib PCR, LDS Lib PCR XP, LDS Stock, LDS Stock XP, LDW-96 Stock, LHR End Prep, LHR Lib PCR, LHR PCR 1, LHR PCR 2, LHR RT, LHR XP, LHR-384 AL Lib, LHR-384 cDNA, LHR-384 End Prep, LHR-384 Lib PCR, LHR-384 PCR 1, LHR-384 PCR 2, LHR-384 RT, LHR-384 XP, LILYS-96 Stock, LRC Blood Bank, LRC HT 5p cDNA PCR, LRC HT 5p cDNA PCR XP, LRC HT 5p Chip, LRC HT 5p GEMs, LRC PBMC Bank, LRC PBMC Cryostor, LRC PBMC Defrost PBS, LRC PBMC Pools Input, LTHR Cherrypick, LTHR Lib PCR 1, LTHR Lib PCR 2, LTHR Lib PCR pool, LTHR PCR 1, LTHR PCR 2, LTHR RT, LTHR RT-S, LTHR-384 Lib PCR 1, LTHR-384 Lib PCR 2, LTHR-384 Lib PCR pool, LTHR-384 PCR 1, LTHR-384 PCR 2, LTHR-384 RT, LTHR-384 RT-Q, LTN AL Lib, LTN AL Lib Dil, LTN Cherrypick, LTN Lib PCR, LTN Lib PCR XP, LTN Post Shear, LTN Shear, LTN Stock, LTN Stock XP, PF Cherrypicked, PF End Prep, PF Lib, PF Lib Q-XP2, PF Lib XP, PF Lib XP2, PF Post Shear, PF Post Shear XP, PF Shear, PF-384 End Prep, PF-384 Lib, PF-384 Lib XP2, PF-384 Post Shear XP, pWGS-384 AL Lib, pWGS-384 End Prep, pWGS-384 Lib PCR, pWGS-384 Post Shear XP, RVI Cap Lib, RVI Cap Lib PCR, RVI Cap Lib PCR XP, RVI Cap Lib Pool, RVI cDNA XP, RVI Cherrypick, RVI Hyb, RVI Lib PCR, RVI Lib PCR XP, RVI Lib PrePool, RVI Lig Bind, RVI RT, scRNA cDNA-XP, scRNA End Prep, scRNA Lib PCR, scRNA Stock, scRNA-384 cDNA-XP, scRNA-384 End Prep, scRNA-384 Lib PCR, scRNA-384 Stock, Tag Plate - 384, TR Stock 48, and TR Stock 96 {StateChangers::DefaultStateChanger View class 
documentation} @@ -28,8 +28,8 @@ manual transfer. {include:StateChangers::AutomaticPlateStateChanger} - Used directly in 3 purposes: - LBC Cherrypick, LBSN-96 Lysate, and LSW-96 Stock + Used directly in 4 purposes: + LBC Cherrypick, LBSN-96 Lysate, LRC PBMC Pools, and LSW-96 Stock {StateChangers::AutomaticPlateStateChanger View class documentation} diff --git a/lib/config_loader/poolings_loader.rb b/lib/config_loader/poolings_loader.rb new file mode 100644 index 000000000..2c585a7a8 --- /dev/null +++ b/lib/config_loader/poolings_loader.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +require_relative 'base' + +module ConfigLoader + # Loads the pooling configurations + class PoolingsLoader < ConfigLoader::Base + self.config_folder = 'poolings' + + attr_reader :config + end +end diff --git a/lib/tasks/config.rake b/lib/tasks/config.rake index 3fa8939e1..938618bbe 100644 --- a/lib/tasks/config.rake +++ b/lib/tasks/config.rake @@ -2,6 +2,7 @@ require_relative '../purpose_config' require_relative '../config_loader/purposes_loader' +require_relative '../config_loader/poolings_loader' namespace :config do desc 'Generates a configuration file for the current Rails environment' @@ -84,6 +85,12 @@ namespace :config do end configuration[:submission_templates] = submission_templates + + # Load pooling configurations from config/poolings directory. + # After running the config:generate task, they will be available in the + # code, for example Settings.poolings['scrna_core_donor_pooling'] + puts 'Preparing pooling configurations ...' + configuration[:poolings] = ConfigLoader::PoolingsLoader.new.config end # Write out the current environment configuration file diff --git a/spec/factories/pooling_factories.rb b/spec/factories/pooling_factories.rb new file mode 100644 index 000000000..6fcacee75 --- /dev/null +++ b/spec/factories/pooling_factories.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +FactoryBot.define do + # This factory creates a pooling configuration. 
The created object is a Hash + # representing the configuration. The configuration is loaded from a YAML + # file specified by the name transient attribute. The name attribute is the + # filename without the extension in spec/fixtures/config/poolings/ + # directory. By default, the name is 'donor_pooling', and the factory loads + # the configuration from 'donor_pooling.yml'. The loaded configuration is + # assigned to a specific key in the `Settings.poolings` hash when the object + # is created. The key is the same as the name transient attribute. This + # factory includes a subfactory named `donor_pooling_config` to make it more + # specific. + # + # Example usage: + # config = create(:pooling_config, name: 'donor_pooling') + # config = create(:donor_pooling_config) + # + # After running the above code, `Settings.poolings['donor_pooling']` will hold + # the configuration loaded from 'donor_pooling.yml'. The Settings object will + # be available with the specified config in the tests. + factory :pooling_config, class: Hash do + transient do + name { 'donor_pooling' } # Default name + end + + pooling { YAML.load_file(Rails.root.join('spec/fixtures/config/poolings/', "#{name}.yml")) } + + # Initialise the instance that the factory creates. It assigns the pooling + # configuration to a specific key in the Settings.poolings hash and then + # returns the pooling configuration. + initialize_with do + Settings.poolings ||= {} + Settings.poolings.merge!(pooling) + pooling + end + + # Override the to_create method to prevent save! when using the factory. + # This makes the create method behave the same as the build method. + to_create do + # Overridden to prevent calling save! on a Hash + end + + # Use a specific name to create a donor pooling config. 
+ factory :donor_pooling_config do + name { 'donor_pooling' } + end + end +end diff --git a/spec/factories/purpose_config_factories.rb b/spec/factories/purpose_config_factories.rb index 632ab4e3c..54637186c 100644 --- a/spec/factories/purpose_config_factories.rb +++ b/spec/factories/purpose_config_factories.rb @@ -267,6 +267,22 @@ end end + factory :donor_pooling_plate_purpose_config do + transient { default_number_of_pools { 16 } } + transient { max_number_of_source_plates { 2 } } + transient { pooling { 'donor_pooling' } } + creator_class do + { + name: 'LabwareCreators::DonorPoolingPlate', + args: { + default_number_of_pools: default_number_of_pools, + max_number_of_source_plates: max_number_of_source_plates, + pooling: pooling + } + } + end + end + # Basic tube purpose configuration factory :tube_config do asset_type { 'tube' } diff --git a/spec/fixtures/config/poolings/donor_pooling.yml b/spec/fixtures/config/poolings/donor_pooling.yml new file mode 100644 index 000000000..ecc363fdd --- /dev/null +++ b/spec/fixtures/config/poolings/donor_pooling.yml @@ -0,0 +1,101 @@ +--- +# Unique name of the pooling configuration. +donor_pooling: + # Mapping between number of samples to number of pools. 
+ number_of_pools: + 96: 8 + 95: 8 + 94: 8 + 93: 8 + 92: 8 + 91: 8 + 90: 8 + 89: 8 + 88: 8 + 87: 7 + 86: 7 + 85: 7 + 84: 7 + 83: 7 + 82: 7 + 81: 7 + 80: 7 + 79: 7 + 78: 7 + 77: 7 + 76: 6 + 75: 6 + 74: 6 + 73: 6 + 72: 6 + 71: 6 + 70: 6 + 69: 6 + 68: 6 + 67: 6 + 66: 6 + 65: 5 + 64: 5 + 63: 5 + 62: 5 + 61: 5 + 60: 5 + 59: 5 + 58: 5 + 57: 5 + 56: 5 + 55: 5 + 54: 5 + 53: 5 + 52: 4 + 51: 4 + 50: 4 + 49: 4 + 48: 4 + 47: 4 + 46: 4 + 45: 4 + 44: 4 + 43: 4 + 42: 4 + 41: 4 + 40: 4 + 39: 3 + 38: 3 + 37: 3 + 36: 3 + 35: 3 + 34: 3 + 33: 3 + 32: 3 + 31: 3 + 30: 3 + 29: 3 + 28: 3 + 27: 3 + 26: 2 + 25: 2 + 24: 2 + 23: 2 + 22: 2 + 21: 2 + 20: 1 + 19: 1 + 18: 1 + 17: 1 + 16: 1 + 15: 1 + 14: 1 + 13: 1 + 12: 1 + 11: 1 + 10: 1 + 9: 1 + 8: 1 + 7: 1 + 6: 1 + 5: 1 + 4: 1 + 3: 1 + 2: 1 + 1: 1 diff --git a/spec/fixtures/config/poolings/second_pooling_config.yml b/spec/fixtures/config/poolings/second_pooling_config.yml new file mode 100644 index 000000000..1cb459e86 --- /dev/null +++ b/spec/fixtures/config/poolings/second_pooling_config.yml @@ -0,0 +1,8 @@ +--- +# This test file is for testing multiple config file loading. 
+second_pooling_config: + number_of_pools: + 1: 1 + 2: 2 + 3: 3 + 4: 4 diff --git a/spec/lib/config_loader/poolings_loader_spec.rb b/spec/lib/config_loader/poolings_loader_spec.rb new file mode 100644 index 000000000..d6cee5e16 --- /dev/null +++ b/spec/lib/config_loader/poolings_loader_spec.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +require 'config_loader/poolings_loader' + +RSpec.describe ConfigLoader::PoolingsLoader, type: :model, loader: true do + subject(:loader) { described_class.new(directory: test_directory, files: selected_files) } + + let(:test_directory) { Rails.root.join('spec/fixtures/config/poolings') } + + context 'with no files specified' do + let(:selected_files) { nil } + + it 'loads purposes from all files' do + expect(loader.config.length).to eq 2 + expect(loader.config).to be_a(Hash) + expect(loader.config.keys).to include('donor_pooling', 'second_pooling_config') + expect(loader.config.dig('donor_pooling', 'number_of_pools')&.size).to eq(96) + p loader.config.dig('second_pooling_config', 'number_of_pools')&.size + expect(loader.config.dig('second_pooling_config', 'number_of_pools')&.size).to eq(4) + end + end + + context 'with a specific file specified' do + let(:selected_files) { 'donor_pooling' } + + it 'loads purposes from specified files' do + expect(loader.config.length).to eq 1 + expect(loader.config).to be_a(Hash) + expect(loader.config.dig('donor_pooling', 'number_of_pools')&.size).to eq(96) + expect(loader.config['donor_pooling']['number_of_pools'][1]).to eq(1) + expect(loader.config['donor_pooling']['number_of_pools'][96]).to eq(8) + end + end +end diff --git a/spec/models/labware_creators/donor_pooling_spec.rb b/spec/models/labware_creators/donor_pooling_spec.rb new file mode 100644 index 000000000..7517de27e --- /dev/null +++ b/spec/models/labware_creators/donor_pooling_spec.rb @@ -0,0 +1,687 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'labware_creators/base' +require_relative 'shared_examples' + 
+RSpec.describe LabwareCreators::DonorPoolingPlate do + it_behaves_like 'it only allows creation from plates' + it_behaves_like 'it has a custom page', 'donor_pooling_plate' + + has_a_working_api + + subject { described_class.new(api, form_attributes) } + let(:user_uuid) { 'user-uuid' } + let(:parent_1_plate_uuid) { 'parent-1-plate-uuid' } + let(:parent_2_plate_uuid) { 'parent-2-plate-uuid' } + let(:parent_purpose_uuid) { 'parent-purpose-uuid' } + let(:child_purpose_uuid) { 'child-purpose-uuid' } + let(:child_plate_uuid) { 'child-plate-uuid' } + let(:requests) { create_list(:request, 96, submission_id: 1) } + + let(:parent_1_plate) do + # The aliquots_without_requests parameter is to prevent the default + # request creation so we can use the same submission_id on requests. + plate = create(:v2_plate, uuid: parent_1_plate_uuid, aliquots_without_requests: 1) + plate.wells.each_with_index do |well, index| + well.aliquots.first.request = requests[index] + well.aliquots.first.sample.sample_metadata.donor_id = nil + end + plate + end + + let(:parent_2_plate) do + # The aliquots_without_requests parameter is to prevent the default + # request creation so we can use the same submission_id on requests. + plate = create(:v2_plate, uuid: parent_2_plate_uuid, aliquots_without_requests: 1) + plate.wells.each_with_index do |well, index| + well.aliquots.first.request = requests[index] + well.aliquots.first.sample.sample_metadata.donor_id = nil + end + plate + end + let(:source_plates) { [parent_1_plate, parent_2_plate] } + + let(:child_plate) { create(:v2_plate, uuid: child_plate_uuid) } + + # Usually we need three studies for testing. + let(:study_1) { create(:v2_study, name: 'study-1-name') } + let(:study_2) { create(:v2_study, name: 'study-2-name') } + let(:study_3) { create(:v2_study, name: 'study-3-name') } + + # Usually we need three projects for testing. 
+ let(:project_1) { create(:v2_project, name: 'project-1-name') } + let(:project_2) { create(:v2_project, name: 'project-2-name') } + let(:project_3) { create(:v2_project, name: 'project-3-name') } + + # This is the form that includes plate barcodes, submitted by user. + let(:form_attributes) do + { purpose_uuid: child_purpose_uuid, parent_uuid: parent_1_plate_uuid, barcodes: barcodes, user_uuid: user_uuid } + end + let(:barcodes) { source_plates.map(&:human_barcode) } + + let(:default_number_of_pools) { 16 } + + before do + # Create the pooling config and add to Settings. + create(:donor_pooling_config) + + # Create the plate purpose config and add to Settings. + create( + :donor_pooling_plate_purpose_config, + uuid: child_purpose_uuid, + default_number_of_pools: default_number_of_pools + ) + + # Allow the API call to return two plates by default. + allow(Sequencescape::Api::V2::Plate).to receive(:find_all) + .with({ barcode: barcodes }, includes: described_class::SOURCE_PLATE_INCLUDES) + .and_return(source_plates) + end + + describe '.attributes' do + it 'includes barcodes' do + expect(described_class.attributes).to include(a_hash_including(barcodes: [])) + end + end + + describe '#max_number_of_source_plates' do + it 'returns the number of source plates' do + expect(subject.max_number_of_source_plates).to eq(2) + end + + context 'with a different number of source plates' do + before { create(:donor_pooling_plate_purpose_config, uuid: child_purpose_uuid, max_number_of_source_plates: 3) } + + it 'returns the number of source plates' do + expect(subject.max_number_of_source_plates).to eq(3) + end + end + end + + describe '#well_filter' do + it 'returns a WellFilter with the creator set to self' do + well_filter = subject.well_filter + expect(well_filter).to be_a(LabwareCreators::WellFilter) + expect(well_filter.creator).to eq(subject) + end + + it 'returns the same instance' do + expect(subject.well_filter).to be(subject.well_filter) + end + end + + describe 
'#labware_wells' do + it 'returns the passed wells from the source plates' do + parent_1_plate.wells[0].state = 'passed' + parent_2_plate.wells[0].state = 'passed' + expect(subject.labware_wells).to eq([parent_1_plate.wells[0], parent_2_plate.wells[0]]) + end + end + + describe '#source_plates' do + it 'returns the source plates' do + subject.barcodes = barcodes + expect(subject.source_plates).to eq([parent_1_plate, parent_2_plate]) + end + end + + describe '#source_wells_for_pooling' do + it 'returns the filtered wells from the source plates' do + parent_1_plate.wells[0].state = 'passed' + parent_2_plate.wells[0].state = 'passed' + expect(subject.source_wells_for_pooling).to eq([parent_1_plate.wells[0], parent_2_plate.wells[0]]) + end + end + + describe '#source_wells_to_plates' do + it 'returns a hash mapping source wells to their plates' do + hash = subject.source_wells_to_plates + expect(hash[parent_1_plate.wells.first]).to eq(parent_1_plate) + expect(hash[parent_1_plate.wells.last]).to eq(parent_1_plate) + expect(hash[parent_2_plate.wells.first]).to eq(parent_2_plate) + expect(hash[parent_2_plate.wells.last]).to eq(parent_2_plate) + expect(hash.size).to eq(parent_1_plate.wells.size + parent_2_plate.wells.size) + end + + it 'caches the result' do + expect(subject.source_wells_to_plates).to be(subject.source_wells_to_plates) # same instance + end + end + + describe '#barcodes=' do + it 'sets the barcodes' do + expect(subject.barcodes).to eq(barcodes) + expect(subject.minimal_barcodes).to eq(barcodes) + end + + it 'cleans the barcodes' do + new_barcodes = barcodes.map { |barcode| "\r\n\t\v\f #{barcode} \r\n\t\v\f" } + ['', " \r\n\t\v\f ", nil] + subject.barcodes = new_barcodes + expect(subject.barcodes).to eq(new_barcodes) + expect(subject.minimal_barcodes).to eq(barcodes) + end + end + + describe '#number_of_pools' do + # TODO: Change this test once a new CSV file is provided. 
+ context 'when number of samples is less than or equal to 96' do + it 'returns the number of pools from lookup table' do + { + 1 => 1, + 21 => 2, + 27 => 3, + 40 => 4, + 53 => 5, + 66 => 6, + 77 => 7, + 88 => 8, + 96 => 8 + }.each do |number_of_samples, number_of_pools| + parent_1_plate.wells[0..(number_of_samples - 1)].each { |well| well.state = 'passed' } + subject.well_filter.instance_variable_set(:@well_transfers, nil) # reset well_filter cache + expect(subject.number_of_pools).to eq(number_of_pools) + end + end + end + + context 'when number of samples is greater than 96' do + it 'returns the number of pools from constant' do + parent_1_plate.wells[0..96].each { |well| well.state = 'passed' } + { 97 => default_number_of_pools, 160 => default_number_of_pools }.each do |number_of_samples, number_of_pools| + parent_2_plate.wells[0..(number_of_samples - 97)].each { |well| well.state = 'passed' } + subject.well_filter.instance_variable_set(:@well_transfers, nil) # reset well_filter cache + expect(subject.number_of_pools).to eq(number_of_pools) + end + end + end + end + + describe '#split_single_group_by_study_and_project' do + it 'returns the grouped wells' do + well_p1_w1 = well = parent_1_plate.wells[0] + well.state = 'passed' + well.aliquots.first.study = study_1 + well.aliquots.first.project = project_1 + + well_p1_w2 = well = parent_1_plate.wells[1] + well.state = 'passed' + well.aliquots.first.study = study_1 + well.aliquots.first.project = project_1 + + well_p1_w3 = well = parent_1_plate.wells[2] + well.state = 'passed' + well.aliquots.first.study = study_2 + well.aliquots.first.project = project_1 + + well_p1_w4 = well = parent_1_plate.wells[3] + well.state = 'passed' + well.aliquots.first.study = study_2 + well.aliquots.first.project = project_2 + + well_p2_w1 = well = parent_2_plate.wells[0] + well.state = 'passed' + well.aliquots.first.study = study_1 + well.aliquots.first.project = project_1 + + well_p2_w2 = well = parent_2_plate.wells[1] + well.state 
= 'passed' + well.aliquots.first.study = study_2 + well.aliquots.first.project = project_2 + + groups = [ + [well_p1_w1, well_p1_w2, well_p2_w1], # study_1, project_1 + [well_p1_w3], # study_2, project_1 + [well_p1_w4, well_p2_w2] # study_2, project_2 + ] + expect(subject.split_single_group_by_study_and_project(groups.flatten)).to eq(groups) + end + end + + describe '#split_single_group_by_unique_donor_ids' do + it 'returns the split groups' do + well_p1_w1 = well = parent_1_plate.wells[0] + well.state = 'passed' + well.aliquots.first.sample.sample_metadata.donor_id = 1 # Using integer donor_ids for easy setup. + + well_p1_w2 = well = parent_1_plate.wells[1] + well.state = 'passed' + well.aliquots.first.sample.sample_metadata.donor_id = 1 + + well_p1_w3 = well = parent_1_plate.wells[2] + well.state = 'passed' + well.aliquots.first.sample.sample_metadata.donor_id = 2 + + well_p2_w1 = well = parent_2_plate.wells[0] + well.state = 'passed' + well.aliquots.first.sample.sample_metadata.donor_id = 1 + + well_p2_w2 = well = parent_2_plate.wells[1] + well.state = 'passed' + well.aliquots.first.sample.sample_metadata.donor_id = 2 + + well_p2_w3 = well = parent_2_plate.wells[2] + well.state = 'passed' + well.aliquots.first.sample.sample_metadata.donor_id = 3 + + group = [well_p1_w1, well_p1_w2, well_p1_w3, well_p2_w1, well_p2_w2, well_p2_w3] + split_groups = [ + [well_p1_w1, well_p1_w3, well_p2_w3], # donor_id 1, 2, 3 + [well_p1_w2, well_p2_w2], # donor_id 1, 2 + [well_p2_w1] # donor_id 1 + ] + expect(subject.split_single_group_by_unique_donor_ids(group)).to match_array(split_groups) + end + end + + describe '#unique_donor_ids' do + it 'returns the unique donor ids' do + well_p1_w1 = well = parent_1_plate.wells[0] + well.aliquots.first.sample.sample_metadata.donor_id = 1 # Using integer donor_ids for easy setup. 
+ + well_p1_w2 = well = parent_1_plate.wells[1] + well.aliquots.first.sample.sample_metadata.donor_id = 1 + + well_p1_w3 = well = parent_1_plate.wells[2] + well.aliquots.first.sample.sample_metadata.donor_id = 2 + + well_p2_w1 = well = parent_2_plate.wells[0] + well.aliquots.first.sample.sample_metadata.donor_id = 1 + + well_p2_w2 = well = parent_2_plate.wells[1] + well.aliquots.first.sample.sample_metadata.donor_id = 2 + + well_p2_w3 = well = parent_2_plate.wells[2] + well.aliquots.first.sample.sample_metadata.donor_id = 3 + + group = [well_p1_w1, well_p1_w2, well_p1_w3, well_p2_w1, well_p2_w2, well_p2_w3] + unique_donor_ids = [1, 2, 3] + expect(subject.unique_donor_ids(group)).to eq(unique_donor_ids) + end + end + + describe '#distribute_groups_across_pools' do + context 'with well groups' do + it 'divides large groups' do + groups = [ + parent_1_plate.wells[1..9], # 9 wells + parent_1_plate.wells[10..15], # 6 wells + parent_2_plate.wells[16..20], # 5 wells + parent_2_plate.wells[21..21] # 1 well + ] + + # Helper method (g) to write the expected result. + wells = groups.flatten + g = proc { |*numbers| numbers.map { |number| wells[number - 1] } } + + distributed_groups = [ + g[21], + g[10, 11, 12], + g[13, 14, 15], + g[6, 7, 8, 9], + g[16, 17, 18, 19, 20], + g[1, 2, 3, 4, 5] + ] + expect(subject.distribute_groups_across_pools(groups, 6)).to match_array(distributed_groups) + end + end + + context 'when the number of groups is less than the number of pools' do + it 'divides large groups' do + # Using integers for easy reading. 
+ groups = [[1, 2, 3, 4, 5, 6, 7, 8, 9], [10, 11, 12, 13, 14, 15], [16, 17, 18, 19, 20], [21]] + distributed_groups = [[21], [10, 11, 12], [13, 14, 15], [6, 7, 8, 9], [16, 17, 18, 19, 20], [1, 2, 3, 4, 5]] + expect(subject.distribute_groups_across_pools(groups, 6)).to match_array(distributed_groups) + end + end + + context 'when the number of groups is equal to the number of pools' do + it 'returns the groups intact' do + # Using integers for easy reading. + groups = [[1, 2, 3, 4, 5, 6, 7, 8, 9], [10, 11, 12, 13, 14, 15], [16, 17, 18, 19, 20], [21]] + expect(subject.distribute_groups_across_pools(groups, 4)).to match_array(groups) + end + end + + context 'when the number of groups is greater than the number of pools' do + it 'returns the groups intact' do + # Using integers for easy reading. + groups = [[1, 2, 3, 4, 5, 6, 7, 8, 9], [10, 11, 12, 13, 14, 15], [16, 17, 18, 19, 20], [21]] + expect(subject.distribute_groups_across_pools(groups, 4)).to match_array(groups) + end + end + + context 'when the number of pools is too large' do + it 'divides all groups' do + # Using integers for easy reading. + groups = [[1, 2, 3, 4, 5, 6, 7, 8, 9], [10, 11, 12, 13, 14, 15], [16, 17, 18, 19, 20], [21]] + distributed_groups = (1..21).map { |n| [n] } + expect(subject.distribute_groups_across_pools(groups, 25)).to match_array(distributed_groups) + end + end + end + + describe '#pools' do + let!(:wells) do # eager! 
+ wells = [parent_1_plate.wells[0], parent_1_plate.wells[1], parent_2_plate.wells[0]] + wells.each_with_index do |well, index| + well.state = 'passed' + well.aliquots.first.study = study_1 # same study + well.aliquots.first.project = project_1 # same project + well.aliquots.first.sample.sample_metadata.donor_id = index + 1 # different donors + end + end + + it 'builds the pools' do + pools = subject.pools + expect(pools.size).to eq(1) + expect(pools[0]).to match_array(wells) + end + + it 'caches the result' do + expect(subject.pools).to be(subject.pools) # same instance + end + end + + describe '#build_pools' do + let(:studies) { create_list(:v2_study, 16) } + let(:projects) { create_list(:v2_project, 16) } + let(:donor_ids) { (1..160).to_a } + let(:wells) { parent_1_plate.wells + parent_2_plate.wells[0..63] } + + before do + wells.each_with_index do |well, index| + well.state = 'passed' + well.aliquots.first.study = studies[index % 16] + well.aliquots.first.project = projects[index % 16] + well.aliquots.first.sample.sample_metadata.donor_id = donor_ids[index] + end + end + + it 'returns correct number of pools' do + pools = subject.build_pools + expect(pools.size).to eq(default_number_of_pools) + expect(pools.flatten).to match_array(wells) + end + + it 'returns pools with correct number of studies' do + pools = subject.build_pools + pools.each do |pool| + number_of_unique_study_ids = pool.map { |well| well.aliquots.first.study.id }.uniq.size + expect(number_of_unique_study_ids).to eq(1) + end + end + + it 'returns pools with correct number of projects' do + pools = subject.build_pools + pools.each do |pool| + number_of_unique_project_ids = pool.map { |well| well.aliquots.first.project.id }.uniq.size + expect(number_of_unique_project_ids).to eq(1) + end + end + + it 'returns pools with correct number of donors' do + # Even distribution of donors across pools. 
+ pools = subject.build_pools + pools.each do |pool| + number_of_unique_donor_ids = pool.map { |well| well.aliquots.first.sample.sample_metadata.donor_id }.uniq.size + expect(number_of_unique_donor_ids).to eq(wells.size / default_number_of_pools) + end + end + end + + describe '#transfer_request_attributes' do + let!(:wells) do # eager! + wells = [parent_1_plate.wells[0], parent_2_plate.wells[0]] + wells.each_with_index do |well, index| + well.state = 'passed' + well.aliquots.first.study = study_1 # same study + well.aliquots.first.project = project_1 # same project + well.aliquots.first.sample.sample_metadata.donor_id = index + 1 # different donors + end + end + + it 'returns the transfer request attributes into destination plate' do + attributes = subject.transfer_request_attributes(child_plate) + expect(attributes.size).to eq(2) + + expect(attributes[0]['source_asset']).to eq(wells[0].uuid) + expect(attributes[0]['target_asset']).to eq(child_plate.wells[0].uuid) + expect(attributes[0][:aliquot_attributes]).to eq({ 'tag_depth' => '1' }) + expect(attributes[0]['submission_id']).to eq('1') # request factory insists on string + + expect(attributes[1]['source_asset']).to eq(wells[1].uuid) + expect(attributes[1]['target_asset']).to eq(child_plate.wells[0].uuid) + expect(attributes[1][:aliquot_attributes]).to eq({ 'tag_depth' => '2' }) + expect(attributes[1]['submission_id']).to eq('1') # request factory insists on string + end + end + + describe '#request_hash' do + let(:wells) do + wells = [parent_1_plate.wells[0], parent_2_plate.wells[0]] + wells.each_with_index do |well, index| + well.state = 'passed' + well.aliquots.first.study = study_1 # same study + well.aliquots.first.project = project_1 # same project + well.aliquots.first.sample.sample_metadata.donor_id = index + 1 # different donors + end + end + + it 'returns the request hash' do + hash = subject.request_hash(wells[0], child_plate, { 'submission_id' => '1' }) + expect(hash['source_asset']).to 
eq(wells[0].uuid) + expect(hash['target_asset']).to eq(child_plate.wells[0].uuid) + expect(hash[:aliquot_attributes]).to eq({ 'tag_depth' => '1' }) + expect(hash['submission_id']).to eq('1') + + hash = subject.request_hash(wells[1], child_plate, { 'submission_id' => '1' }) + expect(hash['source_asset']).to eq(wells[1].uuid) + expect(hash['target_asset']).to eq(child_plate.wells[0].uuid) + expect(hash[:aliquot_attributes]).to eq({ 'tag_depth' => '2' }) + expect(hash['submission_id']).to eq('1') + end + end + + describe '#transfer_hash' do + let(:wells) do + wells = [parent_1_plate.wells[0], parent_1_plate.wells[1], parent_2_plate.wells[0]] + wells.each_with_index do |well, index| + well.state = 'passed' + well.aliquots.first.study = study_1 # same study + well.aliquots.first.project = project_1 # same project + well.aliquots.first.sample.sample_metadata.donor_id = index + 1 # different donors + end + end + + it 'returns the transfer hash' do + hash = wells[0..2].index_with { |_well| { dest_locn: 'A1' } } + expect(subject.transfer_hash).to eq(hash) + end + + it 'caches the result' do + expect(subject.transfer_hash).to be(subject.transfer_hash) # same instance + end + end + + describe '#tag_depth_hash' do + it 'returns a hash mapping positions of wells in their pools' do + well_p1_w1 = well = parent_1_plate.wells[0] + well.state = 'passed' + well.aliquots.first.study = study_1 + well.aliquots.first.project = project_1 + well.aliquots.first.sample.sample_metadata.donor_id = 1 + + well_p1_w2 = well = parent_1_plate.wells[1] + well.state = 'passed' + well.aliquots.first.study = study_1 + well.aliquots.first.project = project_1 + well.aliquots.first.sample.sample_metadata.donor_id = 2 + + well_p2_w1 = well = parent_2_plate.wells[0] + well.state = 'passed' + well.aliquots.first.study = study_1 + well.aliquots.first.project = project_1 + well.aliquots.first.sample.sample_metadata.donor_id = 3 + + well_p2_w2 = well = parent_2_plate.wells[1] + well.state = 'passed' + 
well.aliquots.first.study = study_1 + well.aliquots.first.project = project_1 + well.aliquots.first.sample.sample_metadata.donor_id = 1 # same donor as well_p1_w1 + + subject.build_pools + expect(subject.tag_depth_hash[well_p1_w1]).to eq('1') + expect(subject.tag_depth_hash[well_p1_w2]).to eq('2') + expect(subject.tag_depth_hash[well_p2_w1]).to eq('3') + expect(subject.tag_depth_hash[well_p2_w2]).to eq('1') + end + + it 'caches the result' do + well = parent_1_plate.wells[0] + well.state = 'passed' + well.aliquots.first.study = study_1 + well.aliquots.first.project = project_1 + well.aliquots.first.sample.sample_metadata.donor_id = 1 + + subject.build_pools + expect(subject.tag_depth_hash).to be(subject.tag_depth_hash) # same instance + end + end + + describe '#transfer_material_from_parent!' do + let!(:wells) do # eager! + wells = [parent_1_plate.wells[0], parent_2_plate.wells[0]] + wells.each_with_index do |well, index| + well.state = 'passed' + well.aliquots.first.study = study_1 # same study + well.aliquots.first.project = project_1 # same project + well.aliquots.first.sample.sample_metadata.donor_id = index + 1 # different donors + end + end + + let!(:stub_transfer_material_request) do # eager! + allow(Sequencescape::Api::V2::Plate).to receive(:find_by).with(uuid: child_plate.uuid).and_return(child_plate) + stub_api_post( + 'transfer_request_collections', + payload: { + transfer_request_collection: { + user: user_uuid, + transfer_requests: subject.transfer_request_attributes(child_plate) + } + }, + body: '{}' + ) + end + it 'posts transfer requests to Sequencescape' do + subject.transfer_material_from_parent!(child_plate.uuid) + expect(stub_transfer_material_request).to have_been_made + end + end + + describe '#valid?' 
do + describe '#source_barcodes_must_be_entered' do + let(:barcodes) { [] } + it 'reports the error' do + expect(subject).not_to be_valid + expect(subject.errors[:source_barcodes]).to include(described_class::SOURCE_BARCODES_MUST_BE_ENTERED) + end + end + + describe '#source_barcodes_must_be_different' do + before do + allow(Sequencescape::Api::V2::Plate).to receive(:find_all) + .with({ barcode: barcodes }, includes: described_class::SOURCE_PLATE_INCLUDES) + .and_return([parent_1_plate]) + end + let(:barcodes) { [parent_1_plate.human_barcode] * 2 } + it 'reports the error' do + expect(subject).not_to be_valid + expect(subject.errors[:source_barcodes]).to include(described_class::SOURCE_BARCODES_MUST_BE_DIFFERENT) + end + + context 'with single barcode' do + let!(:wells) do + well = parent_1_plate.wells[0] + well.state = 'passed' + well.aliquots.first.study = study_1 + well.aliquots.first.project = project_1 + well.aliquots.first.sample.sample_metadata.donor_id = 1 + end + before do + allow(Sequencescape::Api::V2::Plate).to receive(:find_all) + .with({ barcode: barcodes }, includes: described_class::SOURCE_PLATE_INCLUDES) + .and_return([parent_1_plate]) + end + let(:barcodes) { [parent_1_plate.human_barcode] } + it { is_expected.to be_valid } + end + end + + describe '#source_plates_must_exist' do + let(:barcodes) { [parent_1_plate.human_barcode, 'NOT-A-PLATE-BARCODE'] } + before do + allow(Sequencescape::Api::V2::Plate).to receive(:find_all) + .with({ barcode: barcodes }, includes: described_class::SOURCE_PLATE_INCLUDES) + .and_return([parent_1_plate]) + end + it 'reports the error' do + expect(subject).not_to be_valid + expect(subject.errors[:source_plates]).to include( + format(described_class::SOURCE_PLATES_MUST_EXIST, 'NOT-A-PLATE-BARCODE') + ) + end + end + + describe '#number_of_pools_must_not_exceed_configured' do + let!(:wells) do + # Up to 20 wells, the number of pools is configured as 1. 
If multiple + # studies, projects or the same donors are present, the number of pools + # calculated will be more than 1. + wells = [parent_1_plate.wells[0], parent_1_plate.wells[1], parent_2_plate.wells[0]] + studies = [study_1, study_2, study_3] + wells.each_with_index do |well, index| + well.state = 'passed' + well.aliquots.first.study = studies[index] # different studies + well.aliquots.first.project = project_1 # same project + well.aliquots.first.sample.sample_metadata.donor_id = 1 # same donor + end + end + it 'reports the error' do + expect(subject).not_to be_valid + expect(subject.errors[:source_plates]).to include( + format(described_class::NUMBER_OF_POOLS_MUST_NOT_EXCEED_CONFIGURED, 3, 1) + ) + end + end + + describe '#wells_with_aliquots_must_have_donor_id' do + let!(:wells) do + wells = Array(parent_1_plate.wells[0..3]) + Array(parent_2_plate.wells[0..1]) + wells.each do |well| + well.state = 'passed' + well.aliquots.first.study = study_1 + well.aliquots.first.project = project_1 + end + wells[0].aliquots.first.sample.sample_metadata.donor_id = 1 # OK + wells[1].aliquots = nil # no aliquots: OK + wells[2].aliquots = [] # no aliquots: OK + wells[3].aliquots.first.sample.sample_metadata.donor_id = nil # ERROR + wells[4].aliquots.first.sample.sample_metadata.donor_id = '' # ERROR + wells[5].aliquots.first.sample.sample_metadata.donor_id = ' ' # ERROR + wells + end + it 'reports the error' do + expect(subject).not_to be_valid + invalid_wells_hash = { + parent_1_plate.human_barcode => [wells[3].location], + parent_2_plate.human_barcode => [wells[4].location, wells[5].location] + } + formatted_string = invalid_wells_hash.map { |barcode, wells| "#{barcode}: #{wells.join(', ')}" }.join(' ') + expect(subject.errors[:source_plates]).to include( + format(described_class::WELLS_WITH_ALIQUOTS_MUST_HAVE_DONOR_ID, formatted_string) + ) + end + end + end +end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 7fbb54a60..1da732cb8 100644 --- 
a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -188,6 +188,7 @@ # Wipe out existing purposes Settings.purposes = {} Settings.pipelines = PipelineList.new + Settings.poolings = {} end factory_bot_results = {}