diff --git a/configurations/app_config.yml b/configurations/app_config.yml index ed4cead..147e888 100644 --- a/configurations/app_config.yml +++ b/configurations/app_config.yml @@ -15,3 +15,11 @@ ELASTICSEARCH_BACKUP_FOLDER: "path/to/elasticsearch/backup/folder" verify_certs: False ELASTIC_PASSWORD: elasticsearch_user_password BASE_FOLDER: /etc/searchengine/ +DATA_SOURCES: + - name: omero1 + DATABASE: + DATABASE_NAME: omero + DATABASE_USER: khaledk + DATABASE_PASSWORD: khaled + DATABASE_SERVER_URI: 192.168.0.190 + DATABASE_PORT: 5432 diff --git a/configurations/configuration.py b/configurations/configuration.py index 1742b24..e7e8f5e 100644 --- a/configurations/configuration.py +++ b/configurations/configuration.py @@ -26,15 +26,21 @@ def load_configuration_variables_from_file(config): # loading application configuration variables from a file print("Injecting config variables from :%s" % app_config.INSTANCE_CONFIG) - with open(app_config.INSTANCE_CONFIG) as f: - cofg = yaml.load(f) + with open(app_config.INSTANCE_CONFIG, 'rt') as f: + + #with open(app_config.INSTANCE_CONFIG) as f: + cofg = yaml.safe_load(f.read()) + + + print (cofg) for x, y in cofg.items(): setattr(config, x, y) if hasattr(config, "verify_certs"): try: verify_certs = json.load(config.verify_certs) except Exception as ex: - print(str(ex)) + print (x) + print("Error %s"%str(ex)) verify_certs = False else: verify_certs = False @@ -53,43 +59,68 @@ def set_database_connection_variables(config): :param database: databse name :return: """ - if hasattr(config, "DATABASE_PORT"): - address = config.DATABASE_SERVER_URI + ":%s" % app_config.DATABASE_PORT - else: - address = config.DATABASE_SERVER_URI - app_config.database_connector = "" - app_config.DATABASE_URI = "postgresql://%s:%s@%s/%s" % ( - config.DATABASE_USER, - config.DATABASE_PASSWORD, - address, - config.DATABASE_NAME, - ) - + from omero_search_engine.database.database_connector import DatabaseConnector -def 
update_config_file(updated_configuration): + config.database_connectors={} + for source in config.DATA_SOURCES: + if source.get("DATABASE").get("DATABASE_PORT"): + address = source.get("DATABASE").get("DATABASE_SERVER_URI") + ":%s" % source.get("DATABASE").get("DATABASE_PORT") + else: + address = source.get("DATABASE").get("DATABASE_SERVER_URI") + DATABASE_URI = "postgresql://%s:%s@%s/%s" % ( + source.get("DATABASE").get("DATABASE_USER"), + source.get("DATABASE").get("DATABASE_PASSWORD"), + address, + source.get("DATABASE").get("DATABASE_NAME") + ) + database_connector = DatabaseConnector( + source.get("DATABASE").get("DATABASE_NAME"), DATABASE_URI + ) + config.database_connectors[source.get("name")]= database_connector + + +def update_config_file(updated_configuration, configure_database=False): is_changed = False with open(app_config.INSTANCE_CONFIG) as f: configuration = yaml.load(f) - found = [] - for key, value in updated_configuration.items(): - if key in configuration: - if configuration[key] != value: - configuration[key] = value - is_changed = True - print("%s is Updated, new value is %s " % (key, value)) - else: - found.append(key) - if len(found) != len(updated_configuration): + if not configure_database: + found = [] for key, value in updated_configuration.items(): - if key not in found: - configuration[key] = value - print("%s value is added with value %s " % (key, value)) - is_changed = True + if key in configuration: + if configuration[key] != value: + configuration[key] = value + is_changed = True + print("%s is Updated, new value is %s " % (key, value)) + else: + found.append(key) + if len(found) != len(updated_configuration): + for key, value in updated_configuration.items(): + if key not in found: + configuration[key] = value + print("%s value is added with value %s " % (key, value)) + is_changed = True + else: + is_changed = config_database(configuration, updated_configuration) if is_changed: with open(app_config.INSTANCE_CONFIG, "w") as f: 
yaml.dump(configuration, f) +def config_database(configuration, updated_configuration): + for data_source in configuration.get("DATA_SOURCES"): + changed=False + Found = False + if data_source["name"].lower()==updated_configuration["name"].lower(): + Found = True + for k, v in updated_configuration["DATABASE"].items(): + if data_source["DATABASE"][k] !=v: + data_source["DATABASE"][k]=v + changed=True + break + if not Found: + configuration.get("DATA_SOURCES").append(updated_configuration) + changed = True + return changed class app_config(object): # the configuration can be loadd from yml file later diff --git a/manage.py b/manage.py index 9dd39dc..9386e98 100644 --- a/manage.py +++ b/manage.py @@ -127,12 +127,17 @@ def restore_postgresql_database(): "--resource", help="resource name, creating all the indexes for all the resources is the default", # noqa ) +@manager.option( + "-s", + "--source", + help="data source name, indexing all the data sources is the default", # noqa +) @manager.option( "-b", "--backup", help="if True, backup will be called ", # noqa ) -def get_index_data_from_database(resource="all", backup="True"): +def get_index_data_from_database(resource="all", source="all", backup="True"): """ insert data in Elasticsearch index for each resource It gets the data from postgres database server @@ -145,21 +150,27 @@ def get_index_data_from_database(resource="all", backup="True"): save_key_value_buckets, ) import json - backup = json.loads(backup.lower()) - if resource != "all": - sql_st = sqls_resources.get(resource) - if not sql_st: - return - get_insert_data_to_index(sql_st, resource) - else: + for data_source in search_omero_app.config.database_connectors.keys(): + if source.lower()!="all" and data_source.lower() != source.lower(): + continue + #if resource != "all": + # sql_st = sqls_resources.get(resource) + # if not sql_st: + # return + # get_insert_data_to_index(sql_st, resource) + # else: for res, sql_st in sqls_resources.items(): - 
get_insert_data_to_index(sql_st, res) + if resource.lower()!="all" and resource.lower() != res.lower(): + continue + get_insert_data_to_index(sql_st, res, data_source) + + save_key_value_buckets( - resource_table_=None, re_create_index=True, only_values=False + resource_table_=None, data_source=data_source, re_create_index=True ,only_values=False ) # validat ethe indexing - test_indexing_search_query(deep_check=False, check_studies=True) + test_indexing_search_query(source=data_source, deep_check=False, check_studies=True) # backup the index data if backup: @@ -173,14 +184,20 @@ def get_index_data_from_database(resource="all", backup="True"): @manager.option("-d", "--database", help="database name") @manager.option("-n", "--name", help="database usernname") @manager.option("-p", "--password", help="database username password") +@manager.option("-w", "--working_data_source", help="data source") def set_database_configuration( - url=None, server_port_number=None, database=None, name=None, password=None + working_data_source=None,url=None, server_port_number=None, database=None, name=None, password=None ): + if not working_data_source: + print ("Data source is required to process") database_attrs = {} - if url: - database_attrs["DATABASE_SERVER_URI"] = url + databse_config={} + databse_config["name"]=working_data_source + databse_config["DATABASE"]=database_attrs if database: database_attrs["DATABASE_NAME"] = database + if url: + database_attrs["DATABASE_SERVER_URI"] = url if name: database_attrs["DATABASE_USER"] = name if password: @@ -189,7 +206,7 @@ def set_database_configuration( database_attrs["DATABASE_PORT"] = server_port_number if len(database_attrs) > 0: - update_config_file(database_attrs) + update_config_file(databse_config, configure_database=True) else: search_omero_app.logger.info( "At least one database attribute\ @@ -319,8 +336,13 @@ def cache_key_value_index(resource=None, create_index=None, only_values=None): "--deep_check", help="compare all the 
images from both search engine and database server, default is False so it will compare the number of images and the first searchengine page", # noqa ) +@manager.option( + "-s", + "--source", + help="data source name, indexing all the data sources is the default", # noqa +) def test_indexing_search_query( - json_file="app_data/test_index_data.json", deep_check=False, check_studies=False + json_file="app_data/test_index_data.json", source=None, deep_check=False, check_studies=False ): """ test the indexing and the searchengine query functions @@ -338,12 +360,15 @@ def test_indexing_search_query( get_omero_stats, get_no_images_sql_containers, ) + if not source: + print ("Data source is required to process ") + return - validate_queries(json_file, deep_check) + validate_queries(json_file, source, deep_check) if check_studies: test_no_images() get_omero_stats() - get_no_images_sql_containers() + get_no_images_sql_containers(data_source=source) @manager.command diff --git a/omero_search_engine/__init__.py b/omero_search_engine/__init__.py index adf38f9..33be0da 100644 --- a/omero_search_engine/__init__.py +++ b/omero_search_engine/__init__.py @@ -62,16 +62,16 @@ def create_app(config_name="development"): app_config = configLooader.get(config_name) load_configuration_variables_from_file(app_config) set_database_connection_variables(app_config) - database_connector = DatabaseConnector( - app_config.DATABASE_NAME, app_config.DATABASE_URI - ) + print ("config.database_connectors::::::", app_config.database_connectors) + #atabase_connector = DatabaseConnector( + # app_config.DATABASE_NAME, app_config.DATABASE_URI + # search_omero_app.config.from_object(app_config) search_omero_app.app_context() search_omero_app.app_context().push() search_omero_app.app_context() search_omero_app.app_context().push() ELASTIC_PASSWORD = app_config.ELASTIC_PASSWORD - es_connector = Elasticsearch( app_config.ELASTICSEARCH_URL.split(","), verify_certs=app_config.verify_certs, @@ -82,8 +82,8 @@ 
def create_app(config_name="development"): scheme="https", http_auth=("elastic", ELASTIC_PASSWORD), ) - - search_omero_app.config["database_connector"] = database_connector + search_omero_app.config.database_connectors= app_config.database_connectors + print(search_omero_app.config.database_connectors) search_omero_app.config["es_connector"] = es_connector log_folder = os.path.join(os.path.expanduser("~"), "logs") if not os.path.exists(log_folder): @@ -117,7 +117,6 @@ def create_app(config_name="development"): resources_routers_blueprint_v1, url_prefix="/api/v1/resources" ) - # add it to account for CORS @search_omero_app.after_request def after_request(response): diff --git a/omero_search_engine/api/v1/resources/query_handler.py b/omero_search_engine/api/v1/resources/query_handler.py index f54a449..f4b4435 100644 --- a/omero_search_engine/api/v1/resources/query_handler.py +++ b/omero_search_engine/api/v1/resources/query_handler.py @@ -108,9 +108,9 @@ def adjust_resource(self): ac_value = check_get_names( self.value, self.resource, self.name, True ) - if len(ac_value) == 1: + if ac_value and len(ac_value) == 1: self.value = ac_value[0] - elif len(ac_value) == 0: + elif not ac_value or len(ac_value) == 0: self.value = -1 else: self.value = ac_value diff --git a/omero_search_engine/api/v1/resources/resource_analyser.py b/omero_search_engine/api/v1/resources/resource_analyser.py index 877a3ba..c58ff25 100644 --- a/omero_search_engine/api/v1/resources/resource_analyser.py +++ b/omero_search_engine/api/v1/resources/resource_analyser.py @@ -790,8 +790,13 @@ def get_the_results(resource, name, description, es_index="key_values_resource_c and name.lower() in item.get("description").lower() ) ] - else: + elif "resourcename" in hits[0]["_source"]: + print("==================================") + print ("========>>>>",hits[0]["_source"]) + print ("==================================") returned_results = [item for item in hits[0]["_source"]["resourcename"]] + else: + return 
returned_results # remove container description from the results, # should be added again later after cleaning up the description diff --git a/omero_search_engine/cache_functions/elasticsearch/transform_data.py b/omero_search_engine/cache_functions/elasticsearch/transform_data.py index 529e8a8..1d7cf06 100644 --- a/omero_search_engine/cache_functions/elasticsearch/transform_data.py +++ b/omero_search_engine/cache_functions/elasticsearch/transform_data.py @@ -435,7 +435,7 @@ def insert_resource_data(folder, resource, from_json): total_process = 0 -def get_insert_data_to_index(sql_st, resource): +def get_insert_data_to_index(sql_st, resource, data_source): """ - Query the postgreSQL database server and get metadata (key-value pair) - Process the results data @@ -449,15 +449,18 @@ def get_insert_data_to_index(sql_st, resource): delete_index(resource) create_omero_indexes(resource) sql_ = "select max (id) from %s" % resource - res2 = search_omero_app.config["database_connector"].execute_query(sql_) + res2 = search_omero_app.config.database_connectors[data_source].execute_query(sql_) + #res2 = search_omero_app.config["database_connector"].execute_query(sql_) max_id = res2[0]["max"] + if not max_id: + return page_size = search_omero_app.config["CACHE_ROWS"] start_time = datetime.now() cur_max_id = page_size vals = [] # Prepare the multiprocessing data while True: - vals.append((cur_max_id, (cur_max_id - page_size), resource)) + vals.append((cur_max_id, (cur_max_id - page_size), resource, data_source)) if cur_max_id > max_id: break cur_max_id += page_size @@ -488,7 +491,7 @@ def get_insert_data_to_index(sql_st, resource): search_omero_app.logger.info(cur_max_id) delta = str(datetime.now() - start_time) search_omero_app.logger.info("Total time=%s" % delta) - print(res) + #print(res) finally: pool.close() @@ -500,6 +503,7 @@ def processor_work(lock, global_counter, val): cur_max_id = val[0] range = val[1] resource = val[2] + data_source = val[3] 
search_omero_app.logger.info("%s, %s, %s" % (cur_max_id, range, resource)) from omero_search_engine.cache_functions.elasticsearch.sql_to_csv import ( sqls_resources, @@ -522,7 +526,7 @@ def processor_work(lock, global_counter, val): search_omero_app.logger.info( "Calling the databas for %s/%s" % (global_counter.value, total_process) ) - conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[data_source] results = conn.execute_query(mod_sql) search_omero_app.logger.info("Processing the results...") process_results(results, resource, lock) @@ -632,7 +636,7 @@ def insert_plate_data(folder, plate_file): def save_key_value_buckets( - resource_table_=None, re_create_index=False, only_values=False + resource_table_=None, data_source=None, re_create_index=False, only_values=False ): """ Query the database and get all available keys and values for @@ -641,6 +645,8 @@ def save_key_value_buckets( It will use multiprocessing pool to use parallel processing """ + if data_source==None: + return es_index = "key_value_buckets_information" es_index_2 = "key_values_resource_cach" @@ -671,13 +677,13 @@ def save_key_value_buckets( %s ......." 
% resource_table ) - resource_keys = get_keys(resource_table) + resource_keys = get_keys(resource_table, data_source) name_results = None if resource_table in ["project", "screen"]: sql = "select id, name,description from {resource}".format( resource=resource_table ) - conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[data_source] name_result = conn.execute_query(sql) # name_results = [res["name"] for res in name_results] # Determine the number of images for each container @@ -785,14 +791,15 @@ def save_key_value_buckets_process(lock, global_counter, vals): wrong_keys[resource_table] = [key] -def get_keys(res_table): +def get_keys(res_table, data_source): sql = "select distinct (name) from annotation_mapvalue\ inner join {res_table}annotationlink on\ {res_table}annotationlink.child=\ annotation_mapvalue.annotation_id".format( res_table=res_table ) - results = search_omero_app.config["database_connector"].execute_query(sql) + results = search_omero_app.config.database_connectors[data_source].execute_query(sql) + #results = search_omero_app.config["database_connector"].execute_query(sql) results = [res["name"] for res in results] return results diff --git a/omero_search_engine/validation/results_validator.py b/omero_search_engine/validation/results_validator.py index e9a9108..af6b3f7 100644 --- a/omero_search_engine/validation/results_validator.py +++ b/omero_search_engine/validation/results_validator.py @@ -73,8 +73,9 @@ class Validator(object): and from the searchengine """ - def __init__(self, deep_check=False): + def __init__(self, data_source, deep_check=False): self.deep_check = deep_check + self.data_source=data_source self.identical = True def set_simple_query(self, resource, name, value, type="keyvalue"): @@ -138,7 +139,8 @@ def get_in_sql(self, clauses, name="in_clause"): names=names, values=values, operator="not in" ) # sql = query_methods[name].substitute(names=names, values=values) - conn = 
search_omero_app.config["database_connector"] + #conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[self.data_source] postgres_results = conn.execute_query(sql) results = [item["id"] for item in postgres_results] search_omero_app.logger.info( @@ -158,7 +160,8 @@ def get_or_sql(self, clauses, name="query_image_or"): values = "'%s'" % claus[1].lower() # sql = query_methods[name].substitute(names=names, values=values) sql = query_methods[name].substitute(names=names, values=values, operator="in") - conn = search_omero_app.config["database_connector"] + #conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[self.data_source] postgres_results = conn.execute_query(sql) results = [item["id"] for item in postgres_results] search_omero_app.logger.info( @@ -171,7 +174,8 @@ def get_sql_value_only(self, clauses): operator = "or" else: operator = "and" - conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[self.data_source] + #conn = search_omero_app.config["database_connector"] all_res = [] for val in clauses: sql = query_methods["image_value_only"].substitute(value=val) @@ -196,7 +200,8 @@ def get_and_sql(self, clauses): name=claus[0].lower(), value=claus[1].lower(), ) - conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[self.data_source] + #conn = search_omero_app.config["database_connector"] postgres_results = conn.execute_query(sql) res = [item["id"] for item in postgres_results] search_omero_app.logger.info("results for 'and' received %s" % len(res)) @@ -217,11 +222,13 @@ def get_results_db(self, operator=None): sql = query_methods["available_values_for_key"].substitute( name=self.name ) - conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[self.data_source] + #conn = 
search_omero_app.config["database_connector"] self.postgres_results = conn.execute_query(sql) elif self.value: sql = query_methods["search_any_value"].substitute(val_part=self.value) - conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[self.data_source] + #conn = search_omero_app.config["database_connector"] self.postgres_results = conn.execute_query(sql) return if self.type == "in_clause": @@ -273,7 +280,8 @@ def get_results_db(self, operator=None): sql = sql + " and %s.group_id=%s" % (self.resource, self.group_id) print(sql) # search_omero_app.logger.info ("sql: %s"%sql) - conn = search_omero_app.config["database_connector"] + #conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[self.data_source] postgres_results = conn.execute_query(sql) self.postgres_results = [item["id"] for item in postgres_results] search_omero_app.logger.info( @@ -491,7 +499,8 @@ def get_containers_test_cases(self): projects_count_sql = query_methods["projects_count"].substitute( key=self.name, value=self.value ) - conn = search_omero_app.config["database_connector"] + #conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[self.data_source] screens_results = conn.execute_query(screens_count_sql) projects_results = conn.execute_query(projects_count_sql) screens_results_idr = [item["name"] for item in screens_results] @@ -624,7 +633,7 @@ def compare_results(self, operator=None): ) -def validate_queries(json_file, deep_check): +def validate_queries(json_file, data_source, deep_check): import json import os @@ -651,7 +660,7 @@ def validate_queries(json_file, deep_check): search_omero_app.logger.info( "Testing (equals) %s for name: %s, key: %s" % (resource, name, value) ) - validator = Validator(deep_check) + validator = Validator(data_source, deep_check) validator.set_simple_query(resource, name, value) if resource == "image": mess = 
validator.get_containers_test_cases() @@ -672,7 +681,7 @@ def validate_queries(json_file, deep_check): % (resource, name, value) ) if resource == "image": - not_equals_validator = Validator(deep_check) + not_equals_validator = Validator(data_source, deep_check) not_equals_validator.set_simple_query(resource, name, value) res = not_equals_validator.compare_results("not_equals") elabsed_time = str(datetime.now() - start_time) @@ -686,7 +695,7 @@ def validate_queries(json_file, deep_check): for name, cases_ in complex_test_cases.items(): for cases in cases_: start_time = datetime.now() - validator_c = Validator(deep_check) + validator_c = Validator(data_source, deep_check) validator_c.set_complex_query(name, cases) res = validator_c.compare_results() messages.append( @@ -701,7 +710,7 @@ def validate_queries(json_file, deep_check): for resource, cases in query_in.items(): for case in cases: start_time = datetime.now() - validator_in = Validator(deep_check) + validator_in = Validator(data_source, deep_check) validator_in.set_in_query(case, resource) res = validator_in.compare_results() messages.append( @@ -718,7 +727,7 @@ def validate_queries(json_file, deep_check): search_omero_app.logger.info("Total time=%s" % str(end_in - start_time)) # test the same but change the operator to not in search_omero_app.logger.info("Total time=%s" % str(end_in - start_time)) - validator_not_in = Validator(deep_check) + validator_not_in = Validator(data_source,deep_check) validator_not_in.set_in_query(case, resource, type="not_in_clause") res = validator_not_in.compare_results() messages.append( @@ -903,7 +912,7 @@ def get_omero_stats(): f.write(report) -def check_number_images_sql_containers_using_ids(): +def check_number_images_sql_containers_using_ids(data_source): """ This method tests the number of images inside each container (project or screen) in the searchengine index data @@ -921,7 +930,8 @@ def check_number_images_sql_containers_using_ids(): dd = True - conn = 
search_omero_app.config["database_connector"] + #conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[data_source] all_names = get_resource_names("all") for resource in all_names: search_omero_app.logger.info( @@ -998,7 +1008,7 @@ def check_number_images_sql_containers_using_ids(): return dd -def get_no_images_sql_containers(write_report=True): +def get_no_images_sql_containers(data_source, write_report=True): """ This method tests the number of images inside each container (project or screen) in the searchengine index data @@ -1010,7 +1020,9 @@ def get_no_images_sql_containers(write_report=True): ) from omero_search_engine.api.v1.resources.utils import adjust_query_for_container - conn = search_omero_app.config["database_connector"] + #conn = search_omero_app.config["database_connector"] + conn = search_omero_app.config.database_connectors[data_source] + all_names = get_resource_names("all") messages = [] diff --git a/unit_tests/test_app.py b/unit_tests/test_app.py index 5566739..6090436 100644 --- a/unit_tests/test_app.py +++ b/unit_tests/test_app.py @@ -69,6 +69,8 @@ # deep_check should be a configuration item deep_check = True +#for data_source in search_omero_app.config.database_connectors.keys(): + class BasicTestCase(unittest.TestCase): def setUp(self): @@ -80,7 +82,6 @@ def tearDown(self): def test_api_v1(self): """test url""" tester = search_omero_app.test_client(self) - response = tester.get("/api/v1/resources/", content_type="html/text") self.assertEqual(response.status_code, 200) @@ -108,9 +109,10 @@ def test_query_database(self): """ test connection with postgresql database """ - res = search_omero_app.config["database_connector"].execute_query(sql) - self.assertIsNotNone(res) - self.assertEqual(res[0]["current_database"], "omero") + for data_source in search_omero_app.config.database_connectors.keys(): + res = search_omero_app.config.database_connectors[data_source].execute_query(sql) + 
self.assertIsNotNone(res) + self.assertEqual(res[0]["current_database"], "omero") def validate_json_syntax(self, json_template): try: @@ -168,73 +170,66 @@ def test_single_query(self): test query the search engine and compare its results with the results from the database """ - for resource, cases in simple_queries.items(): - for case in cases: - name = case[0] - value = case[1] - validator = Validator(deep_check) - validator.set_simple_query(resource, name, value) - validator.get_results_db("equals") - validator.get_results_searchengine("equals") + for data_source in search_omero_app.config.database_connectors.keys(): + for resource, cases in simple_queries.items(): + for case in cases: + name = case[0] + value = case[1] + validator = Validator(data_source, deep_check) + validator.set_simple_query(resource, name, value) + validator.get_results_db("equals") + validator.get_results_searchengine("equals") + self.assertEqual( + len(validator.postgres_results), + validator.searchengine_results.get("size"), + ) + validator.get_results_db("not_equals") + validator.get_results_searchengine("not_equals") + self.assertEqual( + len(validator.postgres_results), + validator.searchengine_results.get("size"), + ) + self.assertTrue(validator.identical) + + def test_and_query(self): + name = "query_image_and" + for data_source in search_omero_app.config.database_connectors.keys(): + for cases in query_image_and: + validator = Validator(data_source, deep_check) + validator.set_complex_query(name, cases) + validator.compare_results() self.assertEqual( len(validator.postgres_results), validator.searchengine_results.get("size"), ) - validator.get_results_db("not_equals") - validator.get_results_searchengine("not_equals") + self.assertTrue(validator.identical) + + def test_or_query(self): + for data_source in search_omero_app.config.database_connectors.keys(): + name = "query_image_or" + for cases in query_image_or: + validator = Validator(data_source, deep_check) + 
validator.set_complex_query(name, cases) + validator.compare_results() self.assertEqual( len(validator.postgres_results), validator.searchengine_results.get("size"), ) self.assertTrue(validator.identical) - def test_and_query(self): - name = "query_image_and" - for cases in query_image_and: - validator = Validator(deep_check) - validator.set_complex_query(name, cases) - validator.compare_results() - self.assertEqual( - len(validator.postgres_results), - validator.searchengine_results.get("size"), - ) - self.assertTrue(validator.identical) - - def test_or_query(self): - name = "query_image_or" - for cases in query_image_or: - validator = Validator(deep_check) - validator.set_complex_query(name, cases) - validator.compare_results() - self.assertEqual( - len(validator.postgres_results), - validator.searchengine_results.get("size"), - ) - self.assertTrue(validator.identical) - def test_no_images_containers(self): - self.assertTrue(check_number_images_sql_containers_using_ids()) + for data_source in search_omero_app.config.database_connectors.keys(): + self.assertTrue(check_number_images_sql_containers_using_ids(data_source)) def test_multi_or_quries(self): pass def test_complex_query(self): - name = "query_image_and_or" - for cases in query_image_and_or: - validator = Validator(deep_check) - validator.set_complex_query(name, cases) - validator.compare_results() - self.assertEqual( - len(validator.postgres_results), - validator.searchengine_results.get("size"), - ) - self.assertTrue(validator.identical) - - def test_in_query(self): - for resource, cases in query_in.items(): - for case in cases: - validator = Validator(deep_check) - validator.set_in_query(case, resource) + for data_source in search_omero_app.config.database_connectors.keys(): + name = "query_image_and_or" + for cases in query_image_and_or: + validator = Validator(data_source, deep_check) + validator.set_complex_query(name, cases) validator.compare_results() self.assertEqual( 
len(validator.postgres_results), @@ -242,104 +237,124 @@ def test_in_query(self): ) self.assertTrue(validator.identical) + def test_in_query(self): + for data_source in search_omero_app.config.database_connectors.keys(): + for resource, cases in query_in.items(): + for case in cases: + validator = Validator(data_source, deep_check) + validator.set_in_query(case, resource) + validator.compare_results() + self.assertEqual( + len(validator.postgres_results), + validator.searchengine_results.get("size"), + ) + self.assertTrue(validator.identical) + def test_not_in_query(self): - for resource, cases in query_in.items(): - for case in cases: - validator = Validator(deep_check) - validator.set_in_query(case, resource, type="not_in_clause") + for data_source in search_omero_app.config.database_connectors.keys(): + for resource, cases in query_in.items(): + for case in cases: + validator = Validator(data_source, deep_check) + validator.set_in_query(case, resource, type="not_in_clause") + validator.compare_results() + self.assertEqual( + len(validator.postgres_results), + validator.searchengine_results.get("size"), + ) + self.assertTrue(validator.identical) + + def test_seach_for_any_value(self): + for data_source in search_omero_app.config.database_connectors.keys(): + for part in images_value_parts: + validator = Validator(data_source, deep_check) + validator.set_simple_query("image", None, part, type="buckets") validator.compare_results() self.assertEqual( len(validator.postgres_results), - validator.searchengine_results.get("size"), + validator.searchengine_results.get("total_number_of_buckets"), ) - self.assertTrue(validator.identical) - - def test_seach_for_any_value(self): - for part in images_value_parts: - validator = Validator(deep_check) - validator.set_simple_query("image", None, part, type="buckets") - validator.compare_results() - self.assertEqual( - len(validator.postgres_results), - validator.searchengine_results.get("total_number_of_buckets"), - ) def 
test_available_values_for_key(self): - for image_key in images_keys: - validator = Validator(deep_check) - validator.set_simple_query("image", image_key, None, type="buckets") - validator.compare_results() - self.assertEqual( - len(validator.postgres_results), - validator.searchengine_results.get("total_number_of_buckets"), - ) - - def test_contains_not_contains_queries(self): - for resource, cases in contains_not_contains_queries.items(): - for case in cases: - name = case[0] - value = case[1] - validator = Validator(deep_check) - validator.set_contains_not_contains_query(resource, name, value) - validator.get_results_db("contains") - validator.get_results_searchengine("contains") - self.assertEqual( - len(validator.postgres_results), - validator.searchengine_results.get("size"), - ) - validator.get_results_db("not_contains") - validator.get_results_searchengine("not_contains") + for data_source in search_omero_app.config.database_connectors.keys(): + for image_key in images_keys: + validator = Validator(data_source, deep_check) + validator.set_simple_query("image", image_key, None, type="buckets") + validator.compare_results() self.assertEqual( len(validator.postgres_results), - validator.searchengine_results.get("size"), + validator.searchengine_results.get("total_number_of_buckets"), ) - self.assertTrue(validator.identical) + + def test_contains_not_contains_queries(self): + for data_source in search_omero_app.config.database_connectors.keys(): + for resource, cases in contains_not_contains_queries.items(): + for case in cases: + name = case[0] + value = case[1] + validator = Validator(data_source, deep_check) + validator.set_contains_not_contains_query(resource, name, value) + validator.get_results_db("contains") + validator.get_results_searchengine("contains") + self.assertEqual( + len(validator.postgres_results), + validator.searchengine_results.get("size"), + ) + validator.get_results_db("not_contains") + validator.get_results_searchengine("not_contains") + 
self.assertEqual( + len(validator.postgres_results), + validator.searchengine_results.get("size"), + ) + self.assertTrue(validator.identical) def test_owner(self): - for resource, cases in image_owner.items(): - for case in cases: - name = case[0] - value = case[1] - owner_id = case[2] - validator = Validator(deep_check) - validator.set_simple_query(resource, name, value) - validator.set_owner_group(owner_id=owner_id) - validator.compare_results() - self.assertEqual( - len(validator.postgres_results), - validator.searchengine_results.get("size"), - ) + for data_source in search_omero_app.config.database_connectors.keys(): + for resource, cases in image_owner.items(): + for case in cases: + name = case[0] + value = case[1] + owner_id = case[2] + validator = Validator(data_source, deep_check) + validator.set_simple_query(resource, name, value) + validator.set_owner_group(owner_id=owner_id) + validator.compare_results() + self.assertEqual( + len(validator.postgres_results), + validator.searchengine_results.get("size"), + ) def test_group(self): - for resource, cases in image_group.items(): - for case in cases: - name = case[0] - value = case[1] - group_id = case[2] - validator = Validator(deep_check) - validator.set_simple_query(resource, name, value) - validator.set_owner_group(group_id=group_id) - validator.compare_results() - self.assertEqual( - len(validator.postgres_results), - validator.searchengine_results.get("size"), - ) + for data_source in search_omero_app.config.database_connectors.keys(): + for resource, cases in image_group.items(): + for case in cases: + name = case[0] + value = case[1] + group_id = case[2] + validator = Validator(data_source, deep_check) + validator.set_simple_query(resource, name, value) + validator.set_owner_group(group_id=group_id) + validator.compare_results() + self.assertEqual( + len(validator.postgres_results), + validator.searchengine_results.get("size"), + ) def test_owner_group(self): - for resource, cases in 
image_owner_group.items(): - for case in cases: - name = case[0] - value = case[1] - owner_id = case[2] - group_id = case[3] - validator = Validator(deep_check) - validator.set_simple_query(resource, name, value) - validator.set_owner_group(owner_id=owner_id, group_id=group_id) - validator.compare_results() - self.assertEqual( - len(validator.postgres_results), - validator.searchengine_results.get("size"), - ) + for data_source in search_omero_app.config.database_connectors.keys(): + for resource, cases in image_owner_group.items(): + for case in cases: + name = case[0] + value = case[1] + owner_id = case[2] + group_id = case[3] + validator = Validator(data_source, deep_check) + validator.set_simple_query(resource, name, value) + validator.set_owner_group(owner_id=owner_id, group_id=group_id) + validator.compare_results() + self.assertEqual( + len(validator.postgres_results), + validator.searchengine_results.get("size"), + ) # def test_add_delete_es_index(self): # '''