Skip to content

Commit

Permalink
Fix bug in make_lite_output in case correlation table is NULL because of to…
Browse files Browse the repository at this point in the history
…o few samples
  • Loading branch information
browaeysrobin committed Aug 18, 2023
1 parent 3b003ff commit c242b52
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 21 deletions.
37 changes: 19 additions & 18 deletions R/lr_target_correlation.R
Original file line number Diff line number Diff line change
Expand Up @@ -171,25 +171,26 @@ lr_target_prior_cor_inference = function(receivers_oi, abundance_expression_info
common_samples = intersect(colnames(lr_prod_mat_oi), colnames(target_mat))
if(length(common_samples) < 5){
warning(paste0("not enough samples for a correlation analysis for the celltype ",receiver_oi))
return(NULL)
cor_df = NULL
} else {
lr_prod_mat_oi = lr_prod_mat_oi[,common_samples]
target_mat = target_mat[,common_samples]

# pearson
cor_mat = Hmisc::rcorr(lr_prod_mat_oi %>% t(), target_mat %>% t())

cor_df_pearson = cor_mat$r %>% .[,rownames(target_mat)] %>% data.frame() %>% tibble::rownames_to_column("id") %>% tidyr::gather(target, pearson, -id) %>% tibble::as_tibble()
cor_df_pearson_pval = cor_mat$P %>% .[,rownames(target_mat)] %>% data.frame() %>% tibble::rownames_to_column("id") %>% tidyr::gather(target, pearson_pval, -id) %>% tibble::as_tibble()

# spearman
cor_mat = Hmisc::rcorr(lr_prod_mat_oi %>% t(), target_mat %>% t(), type = "spearman")

cor_df_spearman = cor_mat$r %>% .[,rownames(target_mat)] %>% data.frame() %>% tibble::rownames_to_column("id") %>% tidyr::gather(target, spearman, -id) %>% tibble::as_tibble()
cor_df_spearman_pval = cor_mat$P %>% .[,rownames(target_mat)] %>% data.frame() %>% tibble::rownames_to_column("id") %>% tidyr::gather(target, spearman_pval, -id) %>% tibble::as_tibble()

cor_df = lig_rec_send_rec_mapping %>% dplyr::inner_join(cor_df_pearson, by = "id") %>% dplyr::inner_join(cor_df_pearson_pval, by = c("id", "target")) %>% dplyr::inner_join(cor_df_spearman, by = c("id", "target")) %>% dplyr::inner_join(cor_df_spearman_pval, by = c("id", "target"))
}
lr_prod_mat_oi = lr_prod_mat_oi[,common_samples]
target_mat = target_mat[,common_samples]

# pearson
cor_mat = Hmisc::rcorr(lr_prod_mat_oi %>% t(), target_mat %>% t())

cor_df_pearson = cor_mat$r %>% .[,rownames(target_mat)] %>% data.frame() %>% tibble::rownames_to_column("id") %>% tidyr::gather(target, pearson, -id) %>% tibble::as_tibble()
cor_df_pearson_pval = cor_mat$P %>% .[,rownames(target_mat)] %>% data.frame() %>% tibble::rownames_to_column("id") %>% tidyr::gather(target, pearson_pval, -id) %>% tibble::as_tibble()

# spearman
cor_mat = Hmisc::rcorr(lr_prod_mat_oi %>% t(), target_mat %>% t(), type = "spearman")

cor_df_spearman = cor_mat$r %>% .[,rownames(target_mat)] %>% data.frame() %>% tibble::rownames_to_column("id") %>% tidyr::gather(target, spearman, -id) %>% tibble::as_tibble()
cor_df_spearman_pval = cor_mat$P %>% .[,rownames(target_mat)] %>% data.frame() %>% tibble::rownames_to_column("id") %>% tidyr::gather(target, spearman_pval, -id) %>% tibble::as_tibble()

cor_df = lig_rec_send_rec_mapping %>% dplyr::inner_join(cor_df_pearson, by = "id") %>% dplyr::inner_join(cor_df_pearson_pval, by = c("id", "target")) %>% dplyr::inner_join(cor_df_spearman, by = c("id", "target")) %>% dplyr::inner_join(cor_df_spearman_pval, by = c("id", "target"))

return(cor_df)
# print(cor_df)
# scaling of the correlation metric -- Don't do this for now
# cor_df = cor_df %>% dplyr::ungroup() %>% dplyr::mutate(scaled_pearson = nichenetr::scale_quantile(pearson, 0.05), scaled_spearman = nichenetr::scale_quantile(spearman, 0.05)) # is this scaling necessary?
Expand Down
13 changes: 10 additions & 3 deletions R/pipeline_wrappers.R
Original file line number Diff line number Diff line change
Expand Up @@ -972,7 +972,11 @@ make_lite_output = function(multinichenet_output, top_n_LR = 2500){
multinichenet_output$prioritization_tables$group_prioritization_tbl = multinichenet_output$prioritization_tables$group_prioritization_tbl %>% dplyr::inner_join(LR_subset, by = c("sender", "receiver", "ligand", "receptor"))
multinichenet_output$prioritization_tables$sample_prioritization_tbl = multinichenet_output$prioritization_tables$sample_prioritization_tbl %>% dplyr::inner_join(LR_subset, by = c("sender", "receiver", "ligand", "receptor"))

multinichenet_output$lr_target_prior_cor = multinichenet_output$lr_target_prior_cor %>% dplyr::inner_join(LR_subset_cor, by = c("sender", "receiver", "ligand", "receptor")) %>% dplyr::filter(target %in% gene_subset)
if(nrow(multinichenet_output$lr_target_prior_cor) > 0){
multinichenet_output$lr_target_prior_cor = multinichenet_output$lr_target_prior_cor %>% dplyr::inner_join(LR_subset_cor, by = c("sender", "receiver", "ligand", "receptor")) %>% dplyr::filter(target %in% gene_subset)
} else{
multinichenet_output$lr_target_prior_cor = NULL
}

} else {
if("receiver_info" %in% names(multinichenet_output)) {
Expand Down Expand Up @@ -1025,8 +1029,11 @@ make_lite_output = function(multinichenet_output, top_n_LR = 2500){
multinichenet_output$prioritization_tables$group_prioritization_tbl = multinichenet_output$prioritization_tables$group_prioritization_tbl %>% dplyr::inner_join(LR_subset, by = c("sender", "receiver", "ligand", "receptor"))
multinichenet_output$prioritization_tables$sample_prioritization_tbl = multinichenet_output$prioritization_tables$sample_prioritization_tbl %>% dplyr::inner_join(LR_subset, by = c("sender", "receiver", "ligand", "receptor"))

multinichenet_output$lr_target_prior_cor = multinichenet_output$lr_target_prior_cor %>% dplyr::inner_join(LR_subset_cor, by = c("sender", "receiver", "ligand", "receptor")) %>% dplyr::filter(target %in% gene_subset)

if(nrow(multinichenet_output$lr_target_prior_cor) > 0){
multinichenet_output$lr_target_prior_cor = multinichenet_output$lr_target_prior_cor %>% dplyr::inner_join(LR_subset_cor, by = c("sender", "receiver", "ligand", "receptor")) %>% dplyr::filter(target %in% gene_subset)
} else{
multinichenet_output$lr_target_prior_cor = NULL
}
}

}
Expand Down

0 comments on commit c242b52

Please sign in to comment.