Merge pull request #1290 from veg/develop
2.5.29 rc
spond authored Feb 23, 2021
2 parents bf69de3 + 82281fd commit 4f87ea2
Showing 26 changed files with 347 additions and 254 deletions.
255 changes: 161 additions & 94 deletions res/TemplateBatchFiles/GARD.bf

Large diffs are not rendered by default.

141 changes: 58 additions & 83 deletions res/TemplateBatchFiles/PairwiseRelativeRatio.bf
@@ -38,8 +38,7 @@ referenceSpecCount = ds.species;

treeRead = 0;

if (IS_TREE_PRESENT_IN_DATA)
{
if (IS_TREE_PRESENT_IN_DATA) {
treeString = DATAFILE_TREE;
treeRead = 1;
}
@@ -65,36 +64,29 @@ for (counter = 1; counter<fileCount; counter = counter+1) {

fprintf (stdout,"\n\n** All data files were read successfully **\n\n");

if (dataType)
{
if (codeTables)
{
dummy = ApplyGeneticCodeTable (codeTableMatrix[0]);
if (dataType) {
if (codeTables) {
ApplyGeneticCodeTable (codeTableMatrix[0]);
ModelMatrixDimension = 0;
}
DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
}
else
{
} else {
DataSetFilter filteredData = CreateFilter (ds,1);
}


if (treeRead)
{
if (treeRead) {
fprintf (stdout, "\n\nA tree was found in the data file:\n",treeString,"\n\nWould you like to use it:(Y/N)?");
fscanf (stdin, "String", response);
if ((response=="n")||(response=="N"))
{
if ((response=="n")||(response=="N")) {
treeRead = 0;
}
fprintf (stdout, "\n\n");
}

if (!treeRead)
{
if (!treeRead) {
SetDialogPrompt ("Please select a tree file for the data:");
fscanf (PROMPT_FOR_FILE, "String", treeString);
fscanf (PROMPT_FOR_FILE, REWIND, "String", treeString);
}

SelectTemplateModel(filteredData);
@@ -104,30 +96,20 @@ global RelRatio;
relationString = ":=RelRatio*";

#include "selectModelParameters.bf";

SetDialogPrompt ("Save full results to:");

modelParameterCount = Rows("LAST_MODEL_PARAMETER_LIST");

fprintf (PROMPT_FOR_FILE,CLEAR_FILE);

tabulatedFileName = LAST_FILE_PATH;


singleFileResults = {fileCount,1};

fprintf (stdout,"\n\n***** RUNNING SINGLE FILE ANALYSES *****\n\n");

fullParameterCount = 0;

/*MESSAGE_LOGGING = 0;*/

OPTIMIZATION_PRECISION = OPTIMIZATION_PRECISION/10;

timer = Time(0);

for (counter = 1; counter<= fileCount; counter += 1) {
HarvestFrequencies (vectorOfFrequencies,filteredData,1,1,1);


if (FREQUENCY_SENSITIVE) {
modelMatrix = 0;
if (USE_POSITION_SPECIFIC_FREQS) {
@@ -144,34 +126,9 @@ for (counter = 1; counter<= fileCount; counter += 1) {
}

Tree firstFileTree = treeString;

LikelihoodFunction lf = (filteredData,firstFileTree);
Optimize (res,lf);
DeleteObject (lf);


if (counter<fileCount) {
DeleteObject (lf, :shallow);

DataSet ds = ReadDataFile (stringMatrix[counter]);
if (dataType)
{
if (codeTables)
{
dummy = ApplyGeneticCodeTable (codeTableMatrix[counter]);
ModelMatrixDimension = 0;
}
DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
}
else
{
DataSetFilter filteredData = CreateFilter (ds,1);
}
}
else
{
fullParameterCount = res[1][1];
}
singleFileResults [counter-1] = res[1][0];
fprintf (stdout,"\nFile ",stringMatrix[counter-1]," : ln-likelihood = ",res[1][0]);
if (counter==1)
@@ -204,6 +161,27 @@ for (counter = 1; counter<= fileCount; counter += 1) {
{
fprintf (tabulatedFileName,",",res[0][counter3]);
}
DeleteObject (lf, :shallow);
if (counter<fileCount) {
DataSet ds = ReadDataFile (stringMatrix[counter]);
if (dataType)
{
if (codeTables)
{
ApplyGeneticCodeTable (codeTableMatrix[counter]);
ModelMatrixDimension = 0;
}
DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
}
else
{
DataSetFilter filteredData = CreateFilter (ds,1);
}
}
else
{
fullParameterCount = res[1][1];
}
}

/*OPTIMIZATION_PRECISION = OPTIMIZATION_PRECISION*10;*/
@@ -222,17 +200,17 @@ fprintf(stdout,"\n\n In the summary table below");

fprintf (stdout,"\n\n (*) corresponds to the .05 significance level\n (**) corresponds to the .01 significance level\n (***) corresponds to the .001 significance level.\n\n");

separator = "+--------+--------+--------------+--------------+";
fprintf (stdout,separator,"\n| File 1 | File 2 | LRT | P-Value |\n",separator);
separator = "+--------+--------+--------------+--------------+--------------+";
fprintf (stdout,separator,"\n| File 1 | File 2 | LRT | Rel.Ratio | P-Value |\n",separator);

for (counter = 0; counter< fileCount; counter = counter+1) {
for (counter = 0; counter< fileCount; counter += 1) {
DataSet ds = ReadDataFile (stringMatrix[counter]);

if (dataType)
{
if (codeTables)
{
dummy = ApplyGeneticCodeTable (codeTableMatrix[counter]);
ApplyGeneticCodeTable (codeTableMatrix[counter]);
ModelMatrixDimension = 0;
}
DataSetFilter filteredData = CreateFilter (ds,3,"","",GeneticCodeExclusions);
@@ -269,7 +247,7 @@ for (counter = 0; counter< fileCount; counter = counter+1) {
{
if (codeTables)
{
dummy = ApplyGeneticCodeTable (codeTableMatrix[counter2]);
ApplyGeneticCodeTable (codeTableMatrix[counter2]);
ModelMatrixDimension = 0;
}
DataSetFilter filteredData2 = CreateFilter (ds2,3,"","",GeneticCodeExclusions);
@@ -297,53 +275,49 @@ for (counter = 0; counter< fileCount; counter = counter+1) {
Model RRmodel2 = (modelMatrix2,vectorOfFrequencies2,MULTIPLY_BY_FREQS);
}
}


Tree secondFileTree = treeString;


ReplicateConstraint (constraintString,secondFileTree,firstFileTree);

LikelihoodFunction lfConstrained = (filteredData2,secondFileTree,filteredData,firstFileTree);

Optimize (res1,lfConstrained);

LRT = 2*(singleFileResults[counter]+singleFileResults[counter2]-res1[1][0]);

/*
Export (lfE, lfConstrained);
fprintf (stdout, lfE, "\n");
*/

degFDiff = 2*fullParameterCount-res1[1][1];

if (LRT>0)
{
if (LRT>0) {
pValue = 1.0-CChi2(LRT,degFDiff);
}
else
{
else {
pValue = 1;
fprintf (MESSAGE_LOG,"\nA negative LRT statistic encountered. You may want to increase the optimization precision settings to resolve numerical approximation errors");
}

fprintf (stdout," | ",Format (LRT,12,5)," | ", Format (pValue,12,8)," |");
fprintf (stdout," | ",Format (LRT,12,5)," | ",Format (RelRatio,12,5)," | ", Format (pValue,12,8)," |");

if (pValue<0.05)
{
if (pValue<0.01)
{
if (pValue<0.001)
{
if (pValue<0.05) {
if (pValue<0.01) {
if (pValue<0.001) {
fprintf (stdout," (***) ");
}
else
{
else {
fprintf (stdout," (**) ");
}
}
else
{
} else {
fprintf (stdout," (*) ");
}
}

if ((counter==0)&&(counter2==1))
{
fprintf (tabulatedFileName,"\n\nRelative Ratio Tests\n\nFile 1,File 2,Ln-Likelihood,LRT,pValue");
if ((counter==0)&&(counter2==1)) {
fprintf (tabulatedFileName,"\n\nRelative Ratio Tests\n\nFile 1,File 2,Ln-Likelihood,LRT,RelRatio,pValue");
dataDimension = Columns(res1);
for (counter3 = 0; counter3 < dataDimension; counter3=counter3+1)
{
@@ -352,12 +326,13 @@ for (counter = 0; counter< fileCount; counter = counter+1) {
}
}

fprintf (tabulatedFileName,"\n",Format(counter+1,0,0),",",Format(counter2+1,0,0),",",res1[1][0],",",LRT,",",pValue);
for (counter3 = 0; counter3 < dataDimension; counter3=counter3+1)
{
fprintf (tabulatedFileName,"\n",Format(counter+1,0,0),",",Format(counter2+1,0,0),",",res1[1][0],",",LRT,",",RelRation,",",pValue);
for (counter3 = 0; counter3 < dataDimension; counter3=counter3+1) {
fprintf (tabulatedFileName,",",res1[0][counter3]);
}
DeleteObject (lfConstrained, :shallow);
}

}

fprintf (stdout,"\n",separator,"\n");
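A note on the PairwiseRelativeRatio.bf changes above: the script fits each alignment on its own, then fits a pair of alignments jointly with the second tree's branch lengths tied to the first via the global RelRatio multiplier (through ReplicateConstraint), and compares the fits with a likelihood-ratio test. A minimal sketch of the test-statistic and p-value step, using the same built-ins as the batch file but with hypothetical numbers:

/* Sketch only -- not part of this commit; all values are hypothetical. */
lnL_file1       = -1523.7;  /* unconstrained ln-likelihood for file 1 */
lnL_file2       = -1488.2;  /* unconstrained ln-likelihood for file 2 */
lnL_constrained = -3015.4;  /* joint fit with branch lengths tied via RelRatio */
degFDiff        = 12;       /* difference in the number of estimated parameters */

LRT = 2*(lnL_file1 + lnL_file2 - lnL_constrained);
if (LRT > 0) {
    pValue = 1 - CChi2 (LRT, degFDiff);   /* chi-squared tail probability */
} else {
    pValue = 1;                           /* small negative LRTs are treated as numerical noise */
}
fprintf (stdout, "LRT = ", Format (LRT,12,5), ", p = ", Format (pValue,12,8), "\n");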
9 changes: 6 additions & 3 deletions res/TemplateBatchFiles/SelectionAnalyses/RELAX.bf
@@ -920,13 +920,17 @@ function relax.FitMainTestPair () {
} else {
parameters.SetRange (model.generic.GetGlobalParameter (relax.model_object_map ["relax.test"] , terms.relax.k), terms.relax.k_range1);
}

//assert (__SIGTRAP__);

relax.alternative_model.fit.take2 = estimators.FitLF (relax.filter_names, relax.trees, { "0" : relax.model_map},
relax.alternative_model.fit ,
relax.model_object_map,
{terms.run_options.retain_lf_object: TRUE}
);




if (relax.alternative_model.fit.take2 [terms.fit.log_likelihood] > relax.alternative_model.fit[terms.fit.log_likelihood]) {

@@ -947,6 +951,7 @@ function relax.FitMainTestPair () {
relax.alternative_model.fit = relax.alternative_model.fit.take2;
}

DeleteObject (relax.alternative_model.fit.take2);

parameters.SetRange (model.generic.GetGlobalParameter (relax.model_object_map ["relax.test"] , terms.relax.k), terms.relax.k_range);

@@ -995,8 +1000,7 @@ function relax.FitMainTestPair () {

io.ReportProgressMessageMD ("RELAX", "null", "Fitting the null (K := 1) model");




for (relax.k = 1; relax.k < relax.numbers_of_tested_groups; relax.k += 1) {
relax.model_nmsp = relax.model_namespaces[relax.k ];
if (relax.k > 1) {
@@ -1006,7 +1010,6 @@ function relax.FitMainTestPair () {
}
}


relax.null_model.fit = estimators.FitExistingLF (relax.alternative_model.fit[terms.likelihood_function], relax.model_object_map);
io.ReportProgressMessageMD ("RELAX", "null", "* " + selection.io.report_fit (relax.null_model.fit, 9, relax.codon_data_info[terms.data.sample_size]));
relax.LRT = math.DoLRT (relax.null_model.fit[terms.fit.log_likelihood], relax.alternative_model.fit[terms.fit.log_likelihood], relax.numbers_of_tested_groups-1);
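The RELAX.bf hunks above add a second optimization pass for the alternative model (the `.take2` fit), keep it only if it improves the log-likelihood, and then release the temporary fit object. A schematic of that keep-the-better-fit pattern, with placeholder names rather than the actual RELAX dictionary keys:

/* Sketch only; first_fit/second_fit and the "LogL" key are placeholders. */
first_fit  = {"LogL" : -10234.6};   /* initial fit of the alternative model (hypothetical) */
second_fit = {"LogL" : -10231.9};   /* refit from the same starting point (hypothetical) */

if (second_fit["LogL"] > first_fit["LogL"]) {
    first_fit = second_fit;          /* adopt the better of the two fits */
}
DeleteObject (second_fit);           /* free the temporary fit, as the diff does with `.take2` */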
(diff for an additional changed file; file name not rendered)
@@ -103,8 +103,10 @@ function load_file (prefix) {
utility.SetEnvVariable(utility.getGlobalValue ("terms.trees.data_for_neighbor_joining"),
codon_data_info[utility.getGlobalValue("terms.data.datafilter")]);


partitions_and_trees = trees.LoadAnnotatedTreeTopology.match_partitions (codon_data_info[utility.getGlobalValue("terms.data.partitions")], name_mapping);


utility.SetEnvVariable(utility.getGlobalValue ("terms.trees.data_for_neighbor_joining"), None);

/** this will return a dictionary of partition strings and trees; one set per partition, as in
@@ -155,7 +157,8 @@ function load_file (prefix) {
} else {
selected_branches = selection.io.defineBranchSets(partitions_and_trees);
}



// Place in own attribute called `tested`
selection.io.json_store_key_value_pair (json, None, utility.getGlobalValue("terms.json.tested"), selected_branches);

@@ -216,8 +219,10 @@ function load_file (prefix) {
}

function store_tree_information () {
// Place in own attribute called `tested`

// Place in own attribute called `tested`


selection.io.json_store_key_value_pair (json, None, utility.getGlobalValue("terms.json.tested"), selected_branches);

/** this will return a dictionary of selected branches; one set per partition, like in
@@ -255,6 +260,7 @@ function store_tree_information () {
}



selection.io.json_store_key_value_pair (json, None, utility.getGlobalValue("terms.json.partitions"),
filter_specification);
trees = utility.Map (partitions_and_trees, "_partition_", '_partition_[terms.data.tree]');
@@ -263,11 +269,31 @@ function store_tree_information () {
filter_names = utility.Map (filter_specification, "_partition_", '_partition_[terms.data.name]');

/* Store original name mapping */
for (partition_index = 0; partition_index < partition_count; partition_index += 1) {

selection.io.json_store_branch_attribute(json, utility.getGlobalValue ("terms.original_name"), utility.getGlobalValue ("terms.json.node_label"), display_orders[utility.getGlobalValue ("terms.original_name")],
partition_index,
name_mapping);

if (None != name_mapping) {
name_mapping_upper_case = {};
for (i,n; in; name_mapping) {
name_mapping_upper_case[i&&1] = n;
}

for (partition_index = 0; partition_index < partition_count; partition_index += 1) {

local_name_mapping = {};
for (l, label; in; (trees[partition_index])[utility.getGlobalValue ("terms.trees.partitioned")]) {
if (label == ^"terms.tree_attributes.leaf") {
if (name_mapping / l) {
local_name_mapping [l] = name_mapping [l];
} else {
local_name_mapping [l] = name_mapping_upper_case [l];
}
}
}


selection.io.json_store_branch_attribute(json, utility.getGlobalValue ("terms.original_name"), utility.getGlobalValue ("terms.json.node_label"), display_orders[utility.getGlobalValue ("terms.original_name")],
partition_index,
local_name_mapping);
}
}


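The final hunk above replaces a direct use of name_mapping with a per-partition lookup that falls back to an upper-cased copy of the mapping when a leaf label is not found under its original spelling; `key && 1` builds the upper-cased key and `dict / key` tests for key membership. A small sketch of that fallback with a hypothetical mapping:

/* Sketch only; the mapping and label are hypothetical. */
name_mapping = {"seq_a" : "sample A"};
name_mapping_upper_case = {};
for (i, n; in; name_mapping) {
    name_mapping_upper_case [i && 1] = n;       /* "SEQ_A" -> "sample A" */
}

label = "SEQ_A";
if (name_mapping / label) {                     /* exact key present? */
    mapped = name_mapping [label];
} else {
    mapped = name_mapping_upper_case [label];   /* fall back to the upper-cased key */
}
fprintf (stdout, label, " -> ", mapped, "\n");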

1 comment on commit 4f87ea2

@kjlevitz (Contributor)


⚠️ Performance Alert ⚠️

A possible performance regression was detected for benchmark 'Benchmark.js Benchmark'.
The benchmark result for this commit is worse than the previous result, exceeding the threshold of 2.

Benchmark suite | Current: 4f87ea2 | Previous: 82281fd | Ratio
ABSREL-MH.wbf | Infinity secs/op (±0.000000%) | null secs/op (±0.000000%) | Infinity

This comment was automatically generated by a workflow using github-action-benchmark.

CC: @klevitz
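For context, github-action-benchmark raises this alert when the ratio of the current to the previous measurement exceeds the configured threshold (2 here); with a Previous value of null and a Current of Infinity, the Infinity ratio more likely reflects a missing or failed measurement than a genuine slowdown. A sketch of the check as described in the comment (assumed logic, hypothetical numbers):

/* Assumed alert logic, sketched with hypothetical numbers. */
previous_secs_per_op = 1.3;   /* measurement from the previous commit */
current_secs_per_op  = 3.1;   /* measurement from this commit */
threshold            = 2;     /* the alert threshold quoted above */

ratio = current_secs_per_op / previous_secs_per_op;
if (ratio > threshold) {
    fprintf (stdout, "Possible performance regression: ratio = ", Format (ratio,8,3), "\n");
}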
