From bd871a06bc769769e909b11f36fcf2186491de34 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Fri, 8 Mar 2019 17:27:07 +0100 Subject: [PATCH 001/106] Add fabric sdk py use of discover api --- fabric-sdk-py_tests/fabric-sdk-py-discover.py | 81 +++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 fabric-sdk-py_tests/fabric-sdk-py-discover.py diff --git a/fabric-sdk-py_tests/fabric-sdk-py-discover.py b/fabric-sdk-py_tests/fabric-sdk-py-discover.py new file mode 100644 index 000000000..09911e24e --- /dev/null +++ b/fabric-sdk-py_tests/fabric-sdk-py-discover.py @@ -0,0 +1,81 @@ +from hfc.fabric import Client +from hfc.fabric.channel.channel import Channel +from hfc.fabric.block_decoder import decode_fabric_MSP_config, decode_fabric_peers_info, decode_fabric_endpoint +from hfc.fabric.peer import create_peer +from hfc.fabric.user import create_user +from hfc.util.crypto.crypto import ecies +from hfc.util.keyvaluestore import FileKeyValueStore + +import pprint + +peer_config = {'clientKey': {'path': '/substra/data/orgs/owkin/tls/peer1/cli-client.key'}, + 'clientServer': {'path': '/substra/data/orgs/owkin/tls/peer1/cli-client.crt'}, + 'eventUrl': 'peer1-owkin:7053', + 'grpcOptions': {'grpc.http2.keepalive_time': 15, + 'grpc.ssl_target_name_override': 'peer1-owkin'}, + 'tlsCACerts': { + 'path': '/substra/data/orgs/owkin/ca-cert.pem'}, + 'url': 'peer1-owkin:7051'} + +peer1_owkin = create_peer(endpoint=peer_config['url'], + tls_cacerts=peer_config['tlsCACerts']['path'], + client_key=peer_config['clientKey']['path'], + client_cert=peer_config['clientServer']['path'], + opts=[(k, v) for k, v in peer_config['grpcOptions'].items()]) + +key_path = '/substra/data/orgs/owkin/admin/msp/keystore/6e9ee485f68eb8e71d830f0e46220562960088128b79a52f13692198230438b8_sk' +cert_path = '/substra/data/orgs/owkin/admin/msp/signcerts/cert.pem' + +admin_owkin = create_user(name='admin', + org='owkin', + state_store=FileKeyValueStore('/tmp/kvs/'), + msp_id='owkinMSP', + key_path=key_path, + cert_path=cert_path) + + +print(Client().query_peers(admin_owkin, peer1_owkin)) +print(Client().query_peers(admin_owkin, peer1_owkin, channel='mychannel', local=False)) + +response = Channel('mychannel', '')._discovery(admin_owkin, peer1_owkin, ecies(), config=True, local=True) + + +def process_config_result(config_result): + + results = {'msps': {}, + 'orderers': {}} + + for msp_name in config_result.msps: + results['msps'][msp_name] = decode_fabric_MSP_config(config_result.msps[msp_name].SerializeToString()) + + for orderer_msp in config_result.orderers: + results['orderers'][orderer_msp] = decode_fabric_endpoint(config_result.orderers[orderer_msp].endpoint) + + return results + + +def process_cc_query_res(cc_query_res): + pass + + +def process_members(members): + peers = [] + for msp_name in members.peers_by_org: + peers.append(decode_fabric_peers_info(members.peers_by_org[msp_name].peers)) + return peers + + +results = {} +for res in response.results: + # print(res) + print('-' * 100) + print('Error') + pprint.pprint(res.error) + print('-' * 50) + print('Config result') + process_config_result(res.config_result) + pprint.pprint(process_config_result(res.config_result), indent=2) + # print(f'Chaincode Query result : {res.cc_query_res}') + print('Members') + # pprint.pprint(process_members(res.members), indent=2) + print('#' * 100) From 805d1591b9af2813dba1911b1ca7ad4d1f931e97 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Fri, 8 Mar 2019 18:04:58 +0100 Subject: [PATCH 002/106] Update discover script. 
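The revision below replaces the hardcoded keystore filename with a glob, so the script no longer depends on the generated key name. For comparison, a minimal sketch of the same lookup with an explicit failure path; the helper name and error message are illustrative only and not part of this patch:

    import glob
    import os

    def find_msp_key(keystore_dir):
        # the MSP keystore holds a single generated private key whose filename is not known in advance
        candidates = glob.glob(os.path.join(keystore_dir, '*_sk')) or \
            glob.glob(os.path.join(keystore_dir, '*'))
        if not candidates:
            raise FileNotFoundError(f'no private key found in {keystore_dir}')
        return candidates[0]

    key_path = find_msp_key('/substra/data/orgs/owkin/admin/msp/keystore')

Failing fast here makes a missing or empty keystore obvious, instead of surfacing later as an IndexError on the bare glob.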
--- fabric-sdk-py_tests/fabric-sdk-py-discover.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/fabric-sdk-py_tests/fabric-sdk-py-discover.py b/fabric-sdk-py_tests/fabric-sdk-py-discover.py index 09911e24e..d305be9dc 100644 --- a/fabric-sdk-py_tests/fabric-sdk-py-discover.py +++ b/fabric-sdk-py_tests/fabric-sdk-py-discover.py @@ -1,12 +1,13 @@ from hfc.fabric import Client from hfc.fabric.channel.channel import Channel -from hfc.fabric.block_decoder import decode_fabric_MSP_config, decode_fabric_peers_info, decode_fabric_endpoint +from hfc.fabric.block_decoder import decode_fabric_MSP_config, decode_fabric_peers_info, decode_fabric_endpoints from hfc.fabric.peer import create_peer from hfc.fabric.user import create_user from hfc.util.crypto.crypto import ecies from hfc.util.keyvaluestore import FileKeyValueStore import pprint +import glob peer_config = {'clientKey': {'path': '/substra/data/orgs/owkin/tls/peer1/cli-client.key'}, 'clientServer': {'path': '/substra/data/orgs/owkin/tls/peer1/cli-client.crt'}, @@ -23,7 +24,7 @@ client_cert=peer_config['clientServer']['path'], opts=[(k, v) for k, v in peer_config['grpcOptions'].items()]) -key_path = '/substra/data/orgs/owkin/admin/msp/keystore/6e9ee485f68eb8e71d830f0e46220562960088128b79a52f13692198230438b8_sk' +key_path = glob.glob('/substra/data/orgs/owkin/admin/msp/keystore/*')[0] cert_path = '/substra/data/orgs/owkin/admin/msp/signcerts/cert.pem' admin_owkin = create_user(name='admin', @@ -34,10 +35,13 @@ cert_path=cert_path) -print(Client().query_peers(admin_owkin, peer1_owkin)) -print(Client().query_peers(admin_owkin, peer1_owkin, channel='mychannel', local=False)) +client = Client() +client._crypto_suite = ecies() -response = Channel('mychannel', '')._discovery(admin_owkin, peer1_owkin, ecies(), config=True, local=True) +print(client.query_peers(admin_owkin, peer1_owkin)) +print(client.query_peers(admin_owkin, peer1_owkin, channel='mychannel', local=False)) + +response = Channel('mychannel', '')._discovery(admin_owkin, peer1_owkin, client.crypto_suite, config=True, local=False) def process_config_result(config_result): @@ -49,7 +53,7 @@ def process_config_result(config_result): results['msps'][msp_name] = decode_fabric_MSP_config(config_result.msps[msp_name].SerializeToString()) for orderer_msp in config_result.orderers: - results['orderers'][orderer_msp] = decode_fabric_endpoint(config_result.orderers[orderer_msp].endpoint) + results['orderers'][orderer_msp] = decode_fabric_endpoints(config_result.orderers[orderer_msp].endpoint) return results @@ -73,9 +77,8 @@ def process_members(members): pprint.pprint(res.error) print('-' * 50) print('Config result') - process_config_result(res.config_result) pprint.pprint(process_config_result(res.config_result), indent=2) # print(f'Chaincode Query result : {res.cc_query_res}') print('Members') - # pprint.pprint(process_members(res.members), indent=2) + pprint.pprint(process_members(res.members), indent=2) print('#' * 100) From 9b0e946f6a86942fc1aab42ec5448c04aa501170 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Fri, 8 Mar 2019 18:20:45 +0100 Subject: [PATCH 003/106] Fix crypto suite. 
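The fix below passes the crypto suite of the enrolled admin user to the discovery call instead of assigning the client's private _crypto_suite attribute. A hedged sketch of that pattern, reusing admin_owkin and peer1_owkin as defined in the script above; the fallback to a fresh ecies() suite is an assumption added for illustration:

    from hfc.fabric.channel.channel import Channel
    from hfc.util.crypto.crypto import ecies

    # prefer the suite already attached to the requestor over patching the client
    crypto_suite = getattr(admin_owkin, 'crypto_suite', None) or ecies()
    response = Channel('mychannel', '')._discovery(
        admin_owkin, peer1_owkin, crypto_suite, config=True, local=False)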
--- fabric-sdk-py_tests/fabric-sdk-py-discover.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/fabric-sdk-py_tests/fabric-sdk-py-discover.py b/fabric-sdk-py_tests/fabric-sdk-py-discover.py index d305be9dc..abcd65a45 100644 --- a/fabric-sdk-py_tests/fabric-sdk-py-discover.py +++ b/fabric-sdk-py_tests/fabric-sdk-py-discover.py @@ -36,12 +36,11 @@ client = Client() -client._crypto_suite = ecies() print(client.query_peers(admin_owkin, peer1_owkin)) print(client.query_peers(admin_owkin, peer1_owkin, channel='mychannel', local=False)) -response = Channel('mychannel', '')._discovery(admin_owkin, peer1_owkin, client.crypto_suite, config=True, local=False) +response = Channel('mychannel', '')._discovery(admin_owkin, peer1_owkin, admin_owkin.crypto_suite, config=True, local=False) def process_config_result(config_result): From 0ba180b3a12e4b0fa412dc3aca36553ccf7d66b7 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Fri, 15 Mar 2019 11:07:58 +0100 Subject: [PATCH 004/106] Update discovery calls --- fabric-sdk-py_tests/fabric-sdk-py-discover.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/fabric-sdk-py_tests/fabric-sdk-py-discover.py b/fabric-sdk-py_tests/fabric-sdk-py-discover.py index abcd65a45..0bf3127d5 100644 --- a/fabric-sdk-py_tests/fabric-sdk-py-discover.py +++ b/fabric-sdk-py_tests/fabric-sdk-py-discover.py @@ -40,7 +40,12 @@ print(client.query_peers(admin_owkin, peer1_owkin)) print(client.query_peers(admin_owkin, peer1_owkin, channel='mychannel', local=False)) -response = Channel('mychannel', '')._discovery(admin_owkin, peer1_owkin, admin_owkin.crypto_suite, config=True, local=False) +client.init_with_discovery(admin_owkin, peer1_owkin, + 'mychannel') + +response = Channel('', '')._discovery(admin_owkin, peer1_owkin, config=False, local=True) + +response = Channel('mychannel', '')._discovery(admin_owkin, peer1_owkin, config=True, local=False) def process_config_result(config_result): From a78e39e70848b9ae95bf5041c2c40b57762021d9 Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 4 Apr 2019 17:17:31 +0200 Subject: [PATCH 005/106] clean code - be PEP8 compliant - limit indentation as much as possibke - avoid try/except nested blocks - avoid try with a big block: should be a single line (when possible) - avoid catching directly Exception base class (when possible) - fail fast when it makes sense - rename ambiguous variable name (l) --- substrabac/substrapp/views/objective.py | 482 ++++++++++++------------ 1 file changed, 231 insertions(+), 251 deletions(-) diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index c849a7ba4..705517f58 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -29,66 +29,59 @@ @app.task(bind=True, ignore_result=False) def compute_dryrun(self, metrics_path, test_data_manager_key, pkhash): + subtuple_directory = build_subtuple_folders({'key': pkhash}) + + metrics_path_dst = os.path.join(subtuple_directory, 'metrics/metrics.py') + if not os.path.exists(metrics_path_dst): + shutil.copy2(metrics_path, os.path.join(subtuple_directory, 'metrics/metrics.py')) + os.remove(metrics_path) + + datamanager = getObjectFromLedger(test_data_manager_key, 'queryDataManager') + opener_content, opener_computed_hash = get_computed_hash(datamanager['opener']['storageAddress']) + with open(os.path.join(subtuple_directory, 'opener/opener.py'), 'wb') as opener_file: + opener_file.write(opener_content) + + # Launch verification + client = docker.from_env() 
+ pred_path = os.path.join(subtuple_directory, 'pred') + opener_file = os.path.join(subtuple_directory, 'opener/opener.py') + metrics_file = os.path.join(subtuple_directory, 'metrics/metrics.py') + metrics_path = os.path.join(getattr(settings, 'PROJECT_ROOT'), 'fake_metrics') # base metrics comes with substrabac + + metrics_docker = 'metrics_dry_run' # tag must be lowercase for docker + metrics_docker_name = f'{metrics_docker}_{pkhash}' + volumes = {pred_path: {'bind': '/sandbox/pred', 'mode': 'rw'}, + metrics_file: {'bind': '/sandbox/metrics/__init__.py', 'mode': 'ro'}, + opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}} + + client.images.build(path=metrics_path, + tag=metrics_docker, + rm=False) + + job_args = {'image': metrics_docker, + 'name': metrics_docker_name, + 'cpuset_cpus': '0-0', + 'mem_limit': '1G', + 'command': None, + 'volumes': volumes, + 'shm_size': '8G', + 'labels': ['dryrun'], + 'detach': False, + 'auto_remove': False, + 'remove': False} try: - subtuple_directory = build_subtuple_folders({'key': pkhash}) - - metrics_path_dst = os.path.join(subtuple_directory, 'metrics/metrics.py') - if not os.path.exists(metrics_path_dst): - shutil.copy2(metrics_path, os.path.join(subtuple_directory, 'metrics/metrics.py')) - os.remove(metrics_path) - try: - datamanager = getObjectFromLedger(test_data_manager_key, 'queryDataManager') - except JsonException as e: - raise e - else: - opener_content, opener_computed_hash = get_computed_hash(datamanager['opener']['storageAddress']) - with open(os.path.join(subtuple_directory, 'opener/opener.py'), 'wb') as opener_file: - opener_file.write(opener_content) - - # Launch verification - client = docker.from_env() - pred_path = os.path.join(subtuple_directory, 'pred') - opener_file = os.path.join(subtuple_directory, 'opener/opener.py') - metrics_file = os.path.join(subtuple_directory, 'metrics/metrics.py') - metrics_path = os.path.join(getattr(settings, 'PROJECT_ROOT'), 'fake_metrics') # base metrics comes with substrabac - - metrics_docker = 'metrics_dry_run' # tag must be lowercase for docker - metrics_docker_name = f'{metrics_docker}_{pkhash}' - volumes = {pred_path: {'bind': '/sandbox/pred', 'mode': 'rw'}, - metrics_file: {'bind': '/sandbox/metrics/__init__.py', 'mode': 'ro'}, - opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}} - - client.images.build(path=metrics_path, - tag=metrics_docker, - rm=False) - - job_args = {'image': metrics_docker, - 'name': metrics_docker_name, - 'cpuset_cpus': '0-0', - 'mem_limit': '1G', - 'command': None, - 'volumes': volumes, - 'shm_size': '8G', - 'labels': ['dryrun'], - 'detach': False, - 'auto_remove': False, - 'remove': False} - client.containers.run(**job_args) - - # Verify that the pred file exist assert os.path.exists(os.path.join(pred_path, 'perf.json')) except ContainerError as e: raise Exception(e.stderr) - except Exception as e: - raise str(e) + finally: try: container = client.containers.get(metrics_docker_name) container.remove() - except: + except BaseException: pass remove_subtuple_materials(subtuple_directory) @@ -151,99 +144,94 @@ def create(self, request, *args, **kwargs): return Response({'message': e.args, 'pkhash': pkhash}, status=status.HTTP_400_BAD_REQUEST) - else: - - if dryrun: - try: - metrics_path = os.path.join(getattr(settings, 'DRYRUN_ROOT'), f'metrics_{pkhash}.py') - with open(metrics_path, 'wb') as metrics_file: - metrics_file.write(metrics.open().read()) - - task = compute_dryrun.apply_async((metrics_path, test_data_manager_key, pkhash), 
queue=f"{settings.LEDGER['name']}.dryrunner") - url_http = 'http' if settings.DEBUG else 'https' - site_port = getattr(settings, "SITE_PORT", None) - current_site = f'{getattr(settings, "SITE_HOST")}' - if site_port: - current_site = f'{current_site}:{site_port}' - task_route = f'{url_http}://{current_site}{reverse("substrapp:task-detail", args=[task.id])}' - msg = f'Your dry-run has been taken in account. You can follow the task execution on {task_route}' - except Exception as e: - return Response({'message': f'Could not launch objective creation with dry-run on this instance: {str(e)}'}, - status=status.HTTP_400_BAD_REQUEST) - else: - return Response({'id': task.id, 'message': msg}, status=status.HTTP_202_ACCEPTED) - - # create on db + + if dryrun: try: - instance = self.perform_create(serializer) - except IntegrityError as exc: - try: - pkhash = re.search('\(pkhash\)=\((\w+)\)', exc.args[0]).group(1) - except: - pkhash = '' - return Response({'message': 'A objective with this description file already exists.', 'pkhash': pkhash}, - status=status.HTTP_409_CONFLICT) - except Exception as exc: - return Response({'message': exc.args}, + metrics_path = os.path.join(getattr(settings, 'DRYRUN_ROOT'), f'metrics_{pkhash}.py') + with open(metrics_path, 'wb') as metrics_file: + metrics_file.write(metrics.open().read()) + + task = compute_dryrun.apply_async((metrics_path, test_data_manager_key, pkhash), queue=f"{settings.LEDGER['name']}.dryrunner") + except Exception as e: + return Response({'message': f'Could not launch objective creation with dry-run on this instance: {str(e)}'}, status=status.HTTP_400_BAD_REQUEST) - else: - # init ledger serializer - ledger_serializer = LedgerObjectiveSerializer(data={'test_data_sample_keys': test_data_sample_keys, - 'test_data_manager_key': test_data_manager_key, - 'name': data.get('name'), - 'permissions': data.get('permissions'), - 'metrics_name': data.get('metrics_name'), - 'instance': instance}, - context={'request': request}) - - if not ledger_serializer.is_valid(): - # delete instance - instance.delete() - raise ValidationError(ledger_serializer.errors) - - # create on ledger - data, st = ledger_serializer.create(ledger_serializer.validated_data) - - if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED, status.HTTP_408_REQUEST_TIMEOUT): - return Response(data, status=st) - - headers = self.get_success_headers(serializer.data) - d = dict(serializer.data) - d.update(data) - return Response(d, status=st, headers=headers) - def create_or_update_objective(self, objective, pk): + url_http = 'http' if settings.DEBUG else 'https' + current_site = f'{getattr(settings, "SITE_HOST")}' + site_port = getattr(settings, "SITE_PORT", None) + if site_port: + current_site = f'{current_site}:{site_port}' + task_route = f'{url_http}://{current_site}{reverse("substrapp:task-detail", args=[task.id])}' + msg = f'Your dry-run has been taken in account. 
You can follow the task execution on {task_route}' + + return Response({'id': task.id, 'message': msg}, status=status.HTTP_202_ACCEPTED) + + # create on db try: - # get objective description from remote node - url = objective['description']['storageAddress'] + instance = self.perform_create(serializer) + except IntegrityError as exc: try: - r = requests.get(url, headers={'Accept': 'application/json;version=0.0'}) # TODO pass cert - except: - raise Exception(f'Failed to fetch {url}') - else: - if r.status_code != 200: - raise Exception(f'end to end node report {r.text}') - - try: - computed_hash = self.compute_hash(r.content) - except Exception: - raise Exception('Failed to fetch description file') - else: - if computed_hash != pk: - msg = 'computed hash is not the same as the hosted file. Please investigate for default of synchronization, corruption, or hacked' - raise Exception(msg) - - f = tempfile.TemporaryFile() - f.write(r.content) - - # save/update objective in local db for later use - instance, created = Objective.objects.update_or_create(pkhash=pk, validated=True) - instance.description.save('description.md', f) + pkhash = re.search(r'\(pkhash\)=\((\w+)\)', exc.args[0]).group(1) + except BaseException: + pkhash = '' + return Response({'message': 'A objective with this description file already exists.', 'pkhash': pkhash}, + status=status.HTTP_409_CONFLICT) + except Exception as exc: + return Response({'message': exc.args}, + status=status.HTTP_400_BAD_REQUEST) - except Exception as e: - raise e - else: - return instance + # init ledger serializer + ledger_serializer = LedgerObjectiveSerializer(data={'test_data_sample_keys': test_data_sample_keys, + 'test_data_manager_key': test_data_manager_key, + 'name': data.get('name'), + 'permissions': data.get('permissions'), + 'metrics_name': data.get('metrics_name'), + 'instance': instance}, + context={'request': request}) + + if not ledger_serializer.is_valid(): + # delete instance + instance.delete() + raise ValidationError(ledger_serializer.errors) + + # create on ledger + data, st = ledger_serializer.create(ledger_serializer.validated_data) + + if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED, status.HTTP_408_REQUEST_TIMEOUT): + return Response(data, status=st) + + headers = self.get_success_headers(serializer.data) + d = dict(serializer.data) + d.update(data) + return Response(d, status=st, headers=headers) + + def create_or_update_objective(self, objective, pk): + # get objective description from remote node + url = objective['description']['storageAddress'] + try: + r = requests.get(url, headers={'Accept': 'application/json;version=0.0'}) # TODO pass cert + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + raise Exception(f'Failed to fetch {url}') + if r.status_code != status.HTTP_200_OK: + raise Exception(f'end to end node report {r.text}') + + try: + computed_hash = self.compute_hash(r.content) + except Exception: + raise Exception('Failed to fetch description file') + + if computed_hash != pk: + msg = 'computed hash is not the same as the hosted file. 
Please investigate for default of synchronization, corruption, or hacked' + raise Exception(msg) + + f = tempfile.TemporaryFile() + f.write(r.content) + + # save/update objective in local db for later use + instance, created = Objective.objects.update_or_create(pkhash=pk, validated=True) + instance.description.save('description.md', f) + + return instance def retrieve(self, request, *args, **kwargs): lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field @@ -254,45 +242,32 @@ def retrieve(self, request, *args, **kwargs): try: int(pk, 16) # test if pk is correct (hexadecimal) - except: + except ValueError: return Response({'message': f'Wrong pk {pk}'}, status.HTTP_400_BAD_REQUEST) - else: - # get instance from remote node + + # get instance from remote node + try: + data = getObjectFromLedger(pk, self.ledger_query_call) + except JsonException as e: + return Response(e.msg, status=status.HTTP_400_BAD_REQUEST) + + # try to get it from local db to check if description exists + try: + instance = self.get_object() + except Http404: + instance = None + + if not instance or not instance.description: try: - data = getObjectFromLedger(pk, self.ledger_query_call) - except JsonException as e: - return Response(e.msg, status=status.HTTP_400_BAD_REQUEST) - else: - error = None - instance = None - try: - # try to get it from local db to check if description exists - instance = self.get_object() - except Http404: - try: - instance = self.create_or_update_objective(data, pk) - except Exception as e: - error = e - else: - # check if instance has description - if not instance.description: - try: - instance = self.create_or_update_objective(data, pk) - except Exception as e: - error = e - finally: - if error is not None: - return Response({'message': str(error)}, status=status.HTTP_400_BAD_REQUEST) - - # do not give access to local files address - if instance is not None: - serializer = self.get_serializer(instance, - fields=('owner', 'pkhash', 'creation_date', 'last_modified')) - data.update(serializer.data) - else: - data = {'message': 'Fail to get instance'} - - return Response(data, status=status.HTTP_200_OK) + instance = self.create_or_update_objective(data, pk) + except Exception as e: + return Response({'message': str(e)}, status=status.HTTP_400_BAD_REQUEST) + + # do not give access to local files address + serializer = self.get_serializer( + instance, fields=('owner', 'pkhash', 'creation_date', 'last_modified')) + data.update(serializer.data) + return Response(data, status=status.HTTP_200_OK) def list(self, request, *args, **kwargs): # can modify result by interrogating `request.version` @@ -300,88 +275,93 @@ def list(self, request, *args, **kwargs): data, st = queryLedger({ 'args': '{"Args":["queryObjectives"]}' }) + + data = [] if data is None else data + objectives = [data] + + if st != 200: + return Response(objectives, status=st) + dataManagerData = None algoData = None modelData = None - # init list to return - if data is None: - data = [] - l = [data] - - if st == 200: - - # parse filters - query_params = request.query_params.get('search', None) - - if query_params is not None: - try: - filters = get_filters(query_params) - except Exception as exc: - return Response( - {'message': f'Malformed search filters {query_params}'}, - status=status.HTTP_400_BAD_REQUEST) - else: - # filtering, reset l to an empty array - l = [] - for idx, filter in enumerate(filters): - # init each list iteration to data - l.append(data) - for k, subfilters in filter.items(): - if k == 'objective': # filter by 
own key - for key, val in subfilters.items(): - if key == 'metrics': # specific to nested metrics - l[idx] = [x for x in l[idx] if x[key]['name'] in val] - else: - l[idx] = [x for x in l[idx] if x[key] in val] - elif k == 'dataset': # select objective used by these datamanagers - if not dataManagerData: - # TODO find a way to put this call in cache - dataManagerData, st = queryLedger({ - 'args': '{"Args":["queryDataManagers"]}' - }) - if st != status.HTTP_200_OK: - return Response(dataManagerData, status=st) - if dataManagerData is None: - dataManagerData = [] - - for key, val in subfilters.items(): - filteredData = [x for x in dataManagerData if x[key] in val] - dataManagerKeys = [x['key'] for x in filteredData] - objectiveKeys = [x['objectiveKey'] for x in filteredData] - l[idx] = [x for x in l[idx] if x['key'] in objectiveKeys or x['testData']['dataManagerKey'] in dataManagerKeys] - elif k == 'algo': # select objective used by these algo - if not algoData: - # TODO find a way to put this call in cache - algoData, st = queryLedger({ - 'args': '{"Args":["queryAlgos"]}' - }) - if st != status.HTTP_200_OK: - return Response(algoData, status=st) - if algoData is None: - algoData = [] - - for key, val in subfilters.items(): - filteredData = [x for x in algoData if x[key] in val] - objectiveKeys = [x['objectiveKey'] for x in filteredData] - l[idx] = [x for x in l[idx] if x['key'] in objectiveKeys] - elif k == 'model': # select objectives used by outModel hash - if not modelData: - # TODO find a way to put this call in cache - modelData, st = queryLedger({ - 'args': '{"Args":["queryTraintuples"]}' - }) - if st != status.HTTP_200_OK: - return Response(modelData, status=st) - if modelData is None: - modelData = [] - - for key, val in subfilters.items(): - filteredData = [x for x in modelData if x['outModel'] is not None and x['outModel'][key] in val] - objectiveKeys = [x['objective']['hash'] for x in filteredData] - l[idx] = [x for x in l[idx] if x['key'] in objectiveKeys] - - return Response(l, status=st) + # parse filters + query_params = request.query_params.get('search', None) + if query_params is None: + return Response(objectives, status=st) + + try: + filters = get_filters(query_params) + except Exception: + return Response( + {'message': f'Malformed search filters {query_params}'}, + status=status.HTTP_400_BAD_REQUEST) + + # filtering + objectives = [] + for idx, filter in enumerate(filters): + # init each list iteration to data + objectives.append(data) + + for k, subfilters in filter.items(): + if k == 'objective': # filter by own key + for key, val in subfilters.items(): + if key == 'metrics': # specific to nested metrics + objectives[idx] = [x for x in objectives[idx] if x[key]['name'] in val] + else: + objectives[idx] = [x for x in objectives[idx] if x[key] in val] + + elif k == 'dataset': # select objective used by these datamanagers + if not dataManagerData: + # TODO find a way to put this call in cache + dataManagerData, st = queryLedger({ + 'args': '{"Args":["queryDataManagers"]}' + }) + if st != status.HTTP_200_OK: + return Response(dataManagerData, status=st) + if dataManagerData is None: + dataManagerData = [] + + for key, val in subfilters.items(): + filteredData = [x for x in dataManagerData if x[key] in val] + dataManagerKeys = [x['key'] for x in filteredData] + objectiveKeys = [x['objectiveKey'] for x in filteredData] + objectives[idx] = [x for x in objectives[idx] if x['key'] in objectiveKeys or x['testData']['dataManagerKey'] in dataManagerKeys] + + elif k == 'algo': # 
select objective used by these algo + if not algoData: + # TODO find a way to put this call in cache + algoData, st = queryLedger({ + 'args': '{"Args":["queryAlgos"]}' + }) + if st != status.HTTP_200_OK: + return Response(algoData, status=st) + if algoData is None: + algoData = [] + + for key, val in subfilters.items(): + filteredData = [x for x in algoData if x[key] in val] + objectiveKeys = [x['objectiveKey'] for x in filteredData] + objectives[idx] = [x for x in objectives[idx] if x['key'] in objectiveKeys] + + elif k == 'model': # select objectives used by outModel hash + if not modelData: + # TODO find a way to put this call in cache + modelData, st = queryLedger({ + 'args': '{"Args":["queryTraintuples"]}' + }) + if st != status.HTTP_200_OK: + return Response(modelData, status=st) + if modelData is None: + modelData = [] + + for key, val in subfilters.items(): + filteredData = [x for x in modelData if x['outModel'] is not None and x['outModel'][key] in val] + objectiveKeys = [x['objective']['hash'] for x in filteredData] + objectives[idx] = [x for x in objectives[idx] if x['key'] in objectiveKeys] + + return Response(objectives, status=st) @action(detail=True) def description(self, request, *args, **kwargs): From 45b6ef611ce150f00d835775430212a6a3fd7a32 Mon Sep 17 00:00:00 2001 From: Samuel Date: Wed, 3 Apr 2019 11:51:39 +0200 Subject: [PATCH 006/106] set conflict error on objective create small refacto to avoid duplicated code in query objective tests --- substrabac/substrapp/tests/tests_query.py | 60 ++++++++++++++++++----- substrabac/substrapp/views/objective.py | 11 +++-- substrabac/substrapp/views/utils.py | 9 ++++ 3 files changed, 63 insertions(+), 17 deletions(-) diff --git a/substrabac/substrapp/tests/tests_query.py b/substrabac/substrapp/tests/tests_query.py index a82e0e77b..b974e2584 100644 --- a/substrabac/substrapp/tests/tests_query.py +++ b/substrabac/substrapp/tests/tests_query.py @@ -39,10 +39,10 @@ def setUp(self): os.makedirs(MEDIA_ROOT) self.objective_description, self.objective_description_filename, \ - self.objective_metrics, self.objective_metrics_filename = get_sample_objective() + self.objective_metrics, self.objective_metrics_filename = get_sample_objective() self.data_description, self.data_description_filename, self.data_data_opener, \ - self.data_opener_filename = get_sample_datamanager() + self.data_opener_filename = get_sample_datamanager() def tearDown(self): try: @@ -50,47 +50,83 @@ def tearDown(self): except FileNotFoundError: pass - def test_add_objective_sync_ok(self): - # add associated data opener - datamanager_name = 'slide opener' - DataManager.objects.create(name=datamanager_name, + def add_default_data_manager(self): + DataManager.objects.create(name='slide opener', description=self.data_description, data_opener=self.data_data_opener) - url = reverse('substrapp:objective-list') + def get_default_objective_data(self): + # XXX reload fixtures as it is an opened buffer and a post will + # modify the objects + desc, _, metrics, _ = get_sample_objective() + expected_hash = get_hash(self.objective_description) data = { 'name': 'tough objective', 'test_data_manager_key': get_hash(self.data_data_opener), 'test_data_sample_keys': [ '5c1d9cd1c2c1082dde0921b56d11030c81f62fbb51932758b58ac2569dd0b379', '5c1d9cd1c2c1082dde0921b56d11030c81f62fbb51932758b58ac2569dd0b389'], - 'description': self.objective_description, - 'metrics': self.objective_metrics, + 'description': desc, + 'metrics': metrics, 'permissions': 'all', 'metrics_name': 'accuracy' } + return 
expected_hash, data + + def test_add_objective_sync_ok(self): + self.add_default_data_manager() + + pkhash, data = self.get_default_objective_data() + + url = reverse('substrapp:objective-list') extra = { 'HTTP_ACCEPT': 'application/json;version=0.0', } with mock.patch.object(LedgerObjectiveSerializer, 'create') as mcreate: - mcreate.return_value = { - 'pkhash': 'a554bb7adf2cad37ea8b140dc07359dd6e6cbffb067d568d3ba7b3a9de1ed2f3'}, status.HTTP_201_CREATED + mcreate.return_value = {'pkhash': pkhash}, status.HTTP_201_CREATED response = self.client.post(url, data, format='multipart', **extra) r = response.json() - self.assertEqual(r['pkhash'], get_hash(self.objective_description)) + self.assertEqual(r['pkhash'], pkhash) self.assertEqual(r['validated'], False) self.assertEqual(r['description'], f'http://testserver/media/objectives/{r["pkhash"]}/{self.objective_description_filename}') self.assertEqual(r['metrics'], f'http://testserver/media/objectives/{r["pkhash"]}/{self.objective_metrics_filename}') + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + + def test_add_objective_conflict(self): + self.add_default_data_manager() + pkhash, data = self.get_default_objective_data() + + url = reverse('substrapp:objective-list') + + extra = { + 'HTTP_ACCEPT': 'application/json;version=0.0', + } + + with mock.patch.object(LedgerObjectiveSerializer, 'create') as mcreate: + mcreate.return_value = {'pkhash': pkhash}, status.HTTP_201_CREATED + + response = self.client.post(url, data, format='multipart', **extra) + r = response.json() + + self.assertEqual(r['pkhash'], pkhash) self.assertEqual(response.status_code, status.HTTP_201_CREATED) + # XXX reload data as the previous call to post change it + _, data = self.get_default_objective_data() + response = self.client.post(url, data, format='multipart', **extra) + r = response.json() + + self.assertEqual(response.status_code, status.HTTP_409_CONFLICT) + self.assertEqual(r['pkhash'], pkhash) + def test_add_objective_no_sync_ok(self): # add associated data opener datamanager_name = 'slide opener' diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index 705517f58..5f30101f3 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -24,7 +24,7 @@ from substrapp.utils import queryLedger, get_hash, get_computed_hash from substrapp.tasks import build_subtuple_folders, remove_subtuple_materials -from substrapp.views.utils import get_filters, getObjectFromLedger, ComputeHashMixin, ManageFileMixin, JsonException +from substrapp.views.utils import get_filters, getObjectFromLedger, ComputeHashMixin, ManageFileMixin, JsonException, find_primary_key_error @app.task(bind=True, ignore_result=False) @@ -140,10 +140,11 @@ def create(self, request, *args, **kwargs): try: serializer.is_valid(raise_exception=True) - except Exception as e: - return Response({'message': e.args, - 'pkhash': pkhash}, - status=status.HTTP_400_BAD_REQUEST) + except ValidationError as e: + conflict_error = find_primary_key_error(e) + st = (status.HTTP_409_CONFLICT if conflict_error else + status.HTTP_400_BAD_REQUEST) + return Response({'message': e.args, 'pkhash': pkhash}, status=st) if dryrun: try: diff --git a/substrabac/substrapp/views/utils.py b/substrabac/substrapp/views/utils.py index 7d39a37b7..d47616681 100644 --- a/substrabac/substrapp/views/utils.py +++ b/substrabac/substrapp/views/utils.py @@ -108,3 +108,12 @@ def manage_file(self, field): data = getattr(object, field) return 
CustomFileResponse(open(data.path, 'rb'), as_attachment=True, filename=os.path.basename(data.path)) + + +def find_primary_key_error(validation_error, key_name='pkhash'): + for detail in validation_error.detail: + if key_name in detail: + for error in detail[key_name]: + if error.code == 'unique': + return error + return None From 69d9fa7e2601c2011ed11d11835a013597baf078 Mon Sep 17 00:00:00 2001 From: Samuel Date: Mon, 8 Apr 2019 14:42:50 +0200 Subject: [PATCH 007/106] Kelvin comments --- substrabac/substrapp/views/objective.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index 705517f58..7a51bfddc 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -80,7 +80,7 @@ def compute_dryrun(self, metrics_path, test_data_manager_key, pkhash): finally: try: container = client.containers.get(metrics_docker_name) - container.remove() + container.remove(force=True) except BaseException: pass remove_subtuple_materials(subtuple_directory) From 73394571e93e0b8a3c83e9dfc6599ae7f9b72503 Mon Sep 17 00:00:00 2001 From: Samuel Date: Mon, 8 Apr 2019 15:19:07 +0200 Subject: [PATCH 008/106] check conflict error for algo, datamanager and datasample --- substrabac/substrapp/views/algo.py | 11 +++++------ substrabac/substrapp/views/datamanager.py | 9 +++++---- substrabac/substrapp/views/datasample.py | 11 +++++++---- substrabac/substrapp/views/utils.py | 19 ++++++++++++++----- 4 files changed, 31 insertions(+), 19 deletions(-) diff --git a/substrabac/substrapp/views/algo.py b/substrabac/substrapp/views/algo.py index 1c233add5..4771a3b14 100644 --- a/substrabac/substrapp/views/algo.py +++ b/substrabac/substrapp/views/algo.py @@ -21,7 +21,7 @@ from substrapp.serializers import LedgerAlgoSerializer, AlgoSerializer from substrapp.utils import queryLedger, get_hash, get_computed_hash, \ uncompress_path -from substrapp.views.utils import get_filters, getObjectFromLedger, ComputeHashMixin, ManageFileMixin, JsonException +from substrapp.views.utils import get_filters, getObjectFromLedger, ComputeHashMixin, ManageFileMixin, JsonException, find_primary_key_error from substrapp.tasks import build_subtuple_folders, remove_subtuple_materials @@ -136,11 +136,10 @@ def create(self, request, *args, **kwargs): try: serializer.is_valid(raise_exception=True) except Exception as e: - return Response({ - 'message': e.args, - 'pkhash': pkhash - }, - status=status.HTTP_400_BAD_REQUEST) + conflict_error = find_primary_key_error(e) + st = (status.HTTP_409_CONFLICT if conflict_error else + status.HTTP_400_BAD_REQUEST) + return Response({'message': e.args, 'pkhash': pkhash}, status=st) else: if dryrun: diff --git a/substrabac/substrapp/views/datamanager.py b/substrabac/substrapp/views/datamanager.py index 17a360a79..e8a85d80b 100644 --- a/substrabac/substrapp/views/datamanager.py +++ b/substrabac/substrapp/views/datamanager.py @@ -17,7 +17,7 @@ from substrapp.serializers.ledger.datamanager.util import updateLedgerDataManager from substrapp.serializers.ledger.datamanager.tasks import updateLedgerDataManagerAsync from substrapp.utils import queryLedger, get_hash -from substrapp.views.utils import get_filters, ManageFileMixin, ComputeHashMixin, JsonException +from substrapp.views.utils import get_filters, ManageFileMixin, ComputeHashMixin, JsonException, find_primary_key_error class DataManagerViewSet(mixins.CreateModelMixin, @@ -85,9 +85,10 @@ def create(self, request, *args, **kwargs): try: 
serializer.is_valid(raise_exception=True) except Exception as e: - return Response({'message': e.args, - 'pkhash': pkhash}, - status=status.HTTP_400_BAD_REQUEST) + conflict_error = find_primary_key_error(e) + st = (status.HTTP_409_CONFLICT if conflict_error else + status.HTTP_400_BAD_REQUEST) + return Response({'message': e.args, 'pkhash': pkhash}, status=st) else: if dryrun: return self.dryrun(data_opener) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index 287585056..9346ab83e 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -19,6 +19,7 @@ from substrapp.serializers.ledger.datasample.tasks import updateLedgerDataSampleAsync from substrapp.utils import get_hash, uncompress_path, get_dir_hash from substrapp.tasks import build_subtuple_folders, remove_subtuple_materials +from substrapp.views.utils import find_primary_key_error def path_leaf(path): @@ -212,10 +213,12 @@ def create(self, request, *args, **kwargs): try: serializer.is_valid(raise_exception=True) except Exception as e: - return Response({ - 'message': e.args, - 'pkhash': [x['pkhash'] for x in l]}, - status=status.HTTP_409_CONFLICT) + pkhashes = [x['pkhash'] for x in l] + conflict_error = find_primary_key_error(e) + st = (status.HTTP_409_CONFLICT if conflict_error else + status.HTTP_400_BAD_REQUEST) + return Response({'message': e.args, 'pkhash': pkhashes}, status=st) + else: if dryrun: try: diff --git a/substrabac/substrapp/views/utils.py b/substrabac/substrapp/views/utils.py index d47616681..0e9ea8340 100644 --- a/substrabac/substrapp/views/utils.py +++ b/substrabac/substrapp/views/utils.py @@ -111,9 +111,18 @@ def manage_file(self, field): def find_primary_key_error(validation_error, key_name='pkhash'): - for detail in validation_error.detail: - if key_name in detail: - for error in detail[key_name]: - if error.code == 'unique': - return error + detail = validation_error.detail + + if not isinstance(detail, dict): + # XXX according to the rest_framework documentation, + # validation_error.detail could be either a dict, a list or a + # nested data structure + return None + + for key, errors in detail.items(): + if key != key_name: + continue + for error in errors: + if error.code == 'unique': + return error return None From 2d229e3931b4c9c04680e4e73abd6cd01b78be6f Mon Sep 17 00:00:00 2001 From: Samuel Date: Mon, 8 Apr 2019 15:23:55 +0200 Subject: [PATCH 009/106] fix populate with conflict --- substrabac/populate.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index eaff2a914..38db3b767 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -56,13 +56,6 @@ def create_asset(data, profile, asset, dryrun=False): try: r = client.add(asset, data) except substra.exceptions.HTTPError as e: - if e.response.status_code == status.HTTP_400_BAD_REQUEST: - if 'pkhash' in e.response.json(): - # FIXME server is not correctly responding for some conflict - # cases, overwrite the status code for these cases - print('Bad request should be a conflict') - e.response.status_code = status.HTTP_409_CONFLICT - if e.response.status_code == status.HTTP_408_REQUEST_TIMEOUT: # retry until success in case of timeout print(colored('got a 408, will test to get if from ledger', 'grey')) From 7a669a9b401a243163e36aea893c53a380dc0e0b Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 9 Apr 2019 21:44:27 +0200 Subject: [PATCH 010/106] update after Guillaume comments --- 
substrabac/substrapp/views/objective.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index 7a51bfddc..1160953e6 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -1,4 +1,5 @@ import docker +import logging import os import re import shutil @@ -72,7 +73,6 @@ def compute_dryrun(self, metrics_path, test_data_manager_key, pkhash): try: client.containers.run(**job_args) - assert os.path.exists(os.path.join(pred_path, 'perf.json')) except ContainerError as e: raise Exception(e.stderr) @@ -81,10 +81,13 @@ def compute_dryrun(self, metrics_path, test_data_manager_key, pkhash): try: container = client.containers.get(metrics_docker_name) container.remove(force=True) - except BaseException: - pass + except BaseException as e: + logging.error(e, exc_info=True) remove_subtuple_materials(subtuple_directory) + if not os.path.exists(os.path.join(pred_path, 'perf.json')): + raise Exception('Perf file not found') + class ObjectiveViewSet(mixins.CreateModelMixin, mixins.ListModelMixin, @@ -157,7 +160,7 @@ def create(self, request, *args, **kwargs): status=status.HTTP_400_BAD_REQUEST) url_http = 'http' if settings.DEBUG else 'https' - current_site = f'{getattr(settings, "SITE_HOST")}' + current_site = getattr(settings, "SITE_HOST") site_port = getattr(settings, "SITE_PORT", None) if site_port: current_site = f'{current_site}:{site_port}' @@ -174,8 +177,9 @@ def create(self, request, *args, **kwargs): pkhash = re.search(r'\(pkhash\)=\((\w+)\)', exc.args[0]).group(1) except BaseException: pkhash = '' - return Response({'message': 'A objective with this description file already exists.', 'pkhash': pkhash}, - status=status.HTTP_409_CONFLICT) + finally: + return Response({'message': 'A objective with this description file already exists.', 'pkhash': pkhash}, + status=status.HTTP_409_CONFLICT) except Exception as exc: return Response({'message': exc.args}, status=status.HTTP_400_BAD_REQUEST) From 6a3faa435467bd924a7dd0331e1bf55ea71c4860 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 9 Apr 2019 21:52:00 +0200 Subject: [PATCH 011/106] pr cosmetic fix --- substrabac/substrapp/views/algo.py | 7 ++++--- substrabac/substrapp/views/datamanager.py | 7 ++++--- substrabac/substrapp/views/datasample.py | 7 ++++--- substrabac/substrapp/views/objective.py | 7 ++++--- 4 files changed, 16 insertions(+), 12 deletions(-) diff --git a/substrabac/substrapp/views/algo.py b/substrabac/substrapp/views/algo.py index 4771a3b14..af24246ce 100644 --- a/substrabac/substrapp/views/algo.py +++ b/substrabac/substrapp/views/algo.py @@ -136,9 +136,10 @@ def create(self, request, *args, **kwargs): try: serializer.is_valid(raise_exception=True) except Exception as e: - conflict_error = find_primary_key_error(e) - st = (status.HTTP_409_CONFLICT if conflict_error else - status.HTTP_400_BAD_REQUEST) + if find_primary_key_error(e): + st = status.HTTP_409_CONFLICT + else: + st = status.HTTP_400_BAD_REQUEST return Response({'message': e.args, 'pkhash': pkhash}, status=st) else: diff --git a/substrabac/substrapp/views/datamanager.py b/substrabac/substrapp/views/datamanager.py index e8a85d80b..a3e89025a 100644 --- a/substrabac/substrapp/views/datamanager.py +++ b/substrabac/substrapp/views/datamanager.py @@ -85,9 +85,10 @@ def create(self, request, *args, **kwargs): try: serializer.is_valid(raise_exception=True) except Exception as e: - conflict_error = find_primary_key_error(e) - st = 
(status.HTTP_409_CONFLICT if conflict_error else - status.HTTP_400_BAD_REQUEST) + if find_primary_key_error(e): + st = status.HTTP_409_CONFLICT + else: + st = status.HTTP_400_BAD_REQUEST return Response({'message': e.args, 'pkhash': pkhash}, status=st) else: if dryrun: diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index 9346ab83e..ac3e98a51 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -214,9 +214,10 @@ def create(self, request, *args, **kwargs): serializer.is_valid(raise_exception=True) except Exception as e: pkhashes = [x['pkhash'] for x in l] - conflict_error = find_primary_key_error(e) - st = (status.HTTP_409_CONFLICT if conflict_error else - status.HTTP_400_BAD_REQUEST) + if find_primary_key_error(e): + st = status.HTTP_409_CONFLICT + else: + st = status.HTTP_400_BAD_REQUEST return Response({'message': e.args, 'pkhash': pkhashes}, status=st) else: diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index 5f30101f3..f3555747a 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -141,9 +141,10 @@ def create(self, request, *args, **kwargs): try: serializer.is_valid(raise_exception=True) except ValidationError as e: - conflict_error = find_primary_key_error(e) - st = (status.HTTP_409_CONFLICT if conflict_error else - status.HTTP_400_BAD_REQUEST) + if find_primary_key_error(e): + st = status.HTTP_409_CONFLICT + else: + st = status.HTTP_400_BAD_REQUEST return Response({'message': e.args, 'pkhash': pkhash}, status=st) if dryrun: From e5df1394d27bd87b6080ec4bca25f92c996f9547 Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 11 Apr 2019 09:01:18 +0200 Subject: [PATCH 012/106] replace 200 with status.HTTP_200_OK --- substrabac/substrapp/views/objective.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index 1160953e6..8805eade4 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -283,7 +283,7 @@ def list(self, request, *args, **kwargs): data = [] if data is None else data objectives = [data] - if st != 200: + if st != status.HTTP_200_OK: return Response(objectives, status=st) dataManagerData = None From bf51af6a1c6f22eb7662d607360b2cb94f93f35a Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 11 Apr 2019 09:24:42 +0200 Subject: [PATCH 013/106] small fixes after review --- substrabac/substrapp/views/algo.py | 3 +-- substrabac/substrapp/views/datamanager.py | 3 +-- substrabac/substrapp/views/datasample.py | 3 +-- substrabac/substrapp/views/objective.py | 3 +-- 4 files changed, 4 insertions(+), 8 deletions(-) diff --git a/substrabac/substrapp/views/algo.py b/substrabac/substrapp/views/algo.py index af24246ce..b8fc01df4 100644 --- a/substrabac/substrapp/views/algo.py +++ b/substrabac/substrapp/views/algo.py @@ -136,10 +136,9 @@ def create(self, request, *args, **kwargs): try: serializer.is_valid(raise_exception=True) except Exception as e: + st = status.HTTP_400_BAD_REQUEST if find_primary_key_error(e): st = status.HTTP_409_CONFLICT - else: - st = status.HTTP_400_BAD_REQUEST return Response({'message': e.args, 'pkhash': pkhash}, status=st) else: diff --git a/substrabac/substrapp/views/datamanager.py b/substrabac/substrapp/views/datamanager.py index a3e89025a..a78f15c28 100644 --- a/substrabac/substrapp/views/datamanager.py +++ 
b/substrabac/substrapp/views/datamanager.py @@ -85,10 +85,9 @@ def create(self, request, *args, **kwargs): try: serializer.is_valid(raise_exception=True) except Exception as e: + st = status.HTTP_400_BAD_REQUEST if find_primary_key_error(e): st = status.HTTP_409_CONFLICT - else: - st = status.HTTP_400_BAD_REQUEST return Response({'message': e.args, 'pkhash': pkhash}, status=st) else: if dryrun: diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index ac3e98a51..92e5fc02d 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -214,10 +214,9 @@ def create(self, request, *args, **kwargs): serializer.is_valid(raise_exception=True) except Exception as e: pkhashes = [x['pkhash'] for x in l] + st = status.HTTP_400_BAD_REQUEST if find_primary_key_error(e): st = status.HTTP_409_CONFLICT - else: - st = status.HTTP_400_BAD_REQUEST return Response({'message': e.args, 'pkhash': pkhashes}, status=st) else: diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index f3555747a..c7a611627 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -141,10 +141,9 @@ def create(self, request, *args, **kwargs): try: serializer.is_valid(raise_exception=True) except ValidationError as e: + st = status.HTTP_400_BAD_REQUEST if find_primary_key_error(e): st = status.HTTP_409_CONFLICT - else: - st = status.HTTP_400_BAD_REQUEST return Response({'message': e.args, 'pkhash': pkhash}, status=st) if dryrun: From bcfc6a1a5557c9cf76b12dbe2d58b95e326176b5 Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 11 Apr 2019 11:37:24 +0200 Subject: [PATCH 014/106] remove XXX --- substrabac/substrapp/views/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/substrabac/substrapp/views/utils.py b/substrabac/substrapp/views/utils.py index 0e9ea8340..ec81517ac 100644 --- a/substrabac/substrapp/views/utils.py +++ b/substrabac/substrapp/views/utils.py @@ -114,9 +114,9 @@ def find_primary_key_error(validation_error, key_name='pkhash'): detail = validation_error.detail if not isinstance(detail, dict): - # XXX according to the rest_framework documentation, - # validation_error.detail could be either a dict, a list or a - # nested data structure + # according to the rest_framework documentation, + # validation_error.detail could be either a dict, a list or a nested + # data structure return None for key, errors in detail.items(): From b6d8996aed4234ff8a34f8fd80f3ffe84adf63f3 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 11 Apr 2019 12:01:08 +0200 Subject: [PATCH 015/106] Fix verification position in objective dryrun --- substrabac/substrapp/views/objective.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index 100290254..86947157e 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -73,6 +73,8 @@ def compute_dryrun(self, metrics_path, test_data_manager_key, pkhash): try: client.containers.run(**job_args) + if not os.path.exists(os.path.join(pred_path, 'perf.json')): + raise Exception('Perf file not found') except ContainerError as e: raise Exception(e.stderr) @@ -85,9 +87,6 @@ def compute_dryrun(self, metrics_path, test_data_manager_key, pkhash): logging.error(e, exc_info=True) remove_subtuple_materials(subtuple_directory) - if not os.path.exists(os.path.join(pred_path, 
'perf.json')): - raise Exception('Perf file not found') - class ObjectiveViewSet(mixins.CreateModelMixin, mixins.ListModelMixin, From 754cede5e92ac79b26d7c1e292a22938621f87a2 Mon Sep 17 00:00:00 2001 From: GuillaumeCisco Date: Thu, 11 Apr 2019 14:19:28 +0200 Subject: [PATCH 016/106] Remove django rest framework from populate.py --- substrabac/populate.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index 78bcfdf1b..b8d8de577 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -7,7 +7,6 @@ import substra_sdk_py as substra from termcolor import colored -from rest_framework import status dir_path = os.path.dirname(os.path.realpath(__file__)) @@ -56,7 +55,7 @@ def create_asset(data, profile, asset, dryrun=False): try: r = client.add(asset, data) except substra.exceptions.HTTPError as e: - if e.response.status_code == status.HTTP_408_REQUEST_TIMEOUT: + if e.response.status_code == 408: # retry until success in case of timeout print(colored('got a 408, will test to get if from ledger', 'grey')) r = e.response.json() @@ -69,7 +68,7 @@ def create_asset(data, profile, asset, dryrun=False): return results - elif e.response.status_code == status.HTTP_409_CONFLICT: + elif e.response.status_code == 409: r = e.response.json() print(colored(json.dumps(r, indent=2), 'cyan')) return [x['pkhash'] for x in r] if isinstance(r, list) else r['pkhash'] @@ -87,7 +86,7 @@ def update_datamanager(data_manager_key, data, profile): try: r = client.update('data_manager', data_manager_key, data) except substra.exceptions.HTTPError as e: - if e.response.status_code != status.HTTP_408_REQUEST_TIMEOUT: + if e.response.status_code != 408: print(colored(e, 'red')) return None From ae9a6c1869dee4d80a99879b66b122488b439cdc Mon Sep 17 00:00:00 2001 From: GuillaumeCisco Date: Tue, 16 Apr 2019 10:03:55 +0200 Subject: [PATCH 017/106] Support new async fabric-sdk-py --- .../fabric-sdk-py-query-invoke.py | 33 ++++++++++--------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/fabric-sdk-py_tests/fabric-sdk-py-query-invoke.py b/fabric-sdk-py_tests/fabric-sdk-py-query-invoke.py index 20c4eb7a7..5bef66917 100644 --- a/fabric-sdk-py_tests/fabric-sdk-py-query-invoke.py +++ b/fabric-sdk-py_tests/fabric-sdk-py-query-invoke.py @@ -1,4 +1,5 @@ import os +import asyncio import subprocess from hfc.fabric import Client @@ -12,6 +13,8 @@ cli.new_channel('mychannel') +loop = asyncio.get_event_loop() + from hfc.fabric_ca.caservice import ca_service cacli = ca_service(target="https://rca-owkin:7054", @@ -56,42 +59,42 @@ finally: print(data) - response = cli.chaincode_query( + response = loop.run_until_complete(cli.chaincode_query( requestor=admin_owkin, channel_name='mychannel', - peer_names=['peer1-owkin'], + peers=['peer1-owkin'], args=[], cc_name='mycc', cc_version='1.0', fcn='queryDataManagers' - ) + )) print(response) - response = cli.query_installed_chaincodes( + response = loop.run_until_complete(cli.query_installed_chaincodes( requestor=admin_owkin, - peer_names=['peer1-owkin'] - ) + peers=['peer1-owkin'] + )) print(response) - response = cli.query_channels( + response = loop.run_until_complete(cli.query_channels( requestor=admin_owkin, - peer_names=['peer1-owkin'] - ) + peers=['peer1-owkin'] + )) print(response) - response = cli.query_info( + response = loop.run_until_complete(cli.query_info( requestor=admin_owkin, channel_name='mychannel', - peer_names=['peer1-owkin'] - ) + peers=['peer1-owkin'] + )) print(response) dir_path = 
os.path.dirname(os.path.realpath(__file__)) - response = cli.chaincode_invoke( + response = loop.run_until_complete(cli.chaincode_invoke( requestor=admin_owkin, channel_name='mychannel', - peer_names=['peer1-owkin'], + peers=['peer1-owkin'], args=['ISIC 2018', '59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd', 'http://chunantes.substrabac:8001/media/data_managers/59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd/opener.py', @@ -106,5 +109,5 @@ fcn='registerDataManager', wait_for_event=True, wait_for_event_timeout=5 - ) + )) print(response) From b30d9d365ecf2eb236078262003b3b6b072a424e Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Wed, 17 Apr 2019 11:59:18 +0200 Subject: [PATCH 018/106] Fix find_primary_key_error. --- substrabac/substrapp/views/utils.py | 31 +++++++++++++++++++---------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/substrabac/substrapp/views/utils.py b/substrabac/substrapp/views/utils.py index ec81517ac..3d069cfc5 100644 --- a/substrabac/substrapp/views/utils.py +++ b/substrabac/substrapp/views/utils.py @@ -113,16 +113,27 @@ def manage_file(self, field): def find_primary_key_error(validation_error, key_name='pkhash'): detail = validation_error.detail - if not isinstance(detail, dict): - # according to the rest_framework documentation, - # validation_error.detail could be either a dict, a list or a nested - # data structure + def find_unique_error(detail_dict): + for key, errors in detail_dict.items(): + if key != key_name: + continue + for error in errors: + if error.code == 'unique': + return error + return None - for key, errors in detail.items(): - if key != key_name: - continue - for error in errors: - if error.code == 'unique': - return error + # according to the rest_framework documentation, + # validation_error.detail could be either a dict, a list or a nested + # data structure + + if isinstance(detail, dict): + return find_unique_error(detail) + elif isinstance(detail, list): + for sub_detail in detail: + if isinstance(sub_detail, dict): + unique_error = find_unique_error(sub_detail) + if unique_error is not None: + return unique_error + return None From 302434796e12028f8c11b48453dc9e49ea245d7f Mon Sep 17 00:00:00 2001 From: GuillaumeCisco Date: Thu, 18 Apr 2019 09:55:30 +0200 Subject: [PATCH 019/106] Update to hyperledger fabric 1.4.1 --- bootstrap.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bootstrap.sh b/bootstrap.sh index 293812acd..c622836bc 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -3,11 +3,11 @@ BASEDIR=$(dirname "$0") # if version not passed in, default to latest released version -export VERSION=1.3.0 +export VERSION=1.4.1 # if ca version not passed in, default to latest released version export CA_VERSION=$VERSION # current version of thirdparty images (couchdb, kafka and zookeeper) released -export THIRDPARTY_IMAGE_VERSION=0.4.10 +export THIRDPARTY_IMAGE_VERSION=0.4.15 export ARCH=$(echo "$(uname -s|tr '[:upper:]' '[:lower:]'|sed 's/mingw64_nt.*/windows/')-$(uname -m | sed 's/x86_64/amd64/g')") export MARCH=$(uname -m) From 3a5978f7537cb441bcf479c15fae8c433893803b Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 18 Apr 2019 15:21:27 +0200 Subject: [PATCH 020/106] Handle 409 on tuples --- substrabac/substrapp/views/testtuple.py | 10 ++++++---- substrabac/substrapp/views/traintuple.py | 10 +++++++--- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/substrabac/substrapp/views/testtuple.py b/substrabac/substrapp/views/testtuple.py index 
565a0ec80..0604ea126 100644 --- a/substrabac/substrapp/views/testtuple.py +++ b/substrabac/substrapp/views/testtuple.py @@ -61,7 +61,8 @@ def create(self, request, *args, **kwargs): # Get testtuple pkhash of the proposal args = serializer.get_args(serializer.validated_data) data, st = queryLedger({'args': '{"Args":["createTesttuple", ' + args + ']}'}) - pkhash = bytes.fromhex(data.rstrip()).decode('utf-8') # fail in queryLedger because it's a string hash and not a json + if st in (status.HTTP_200_OK, status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): + pkhash = bytes.fromhex(data.rstrip()).decode('utf-8') # fail in queryLedger because it's a string hash and not a json # create on ledger data, st = serializer.create(serializer.validated_data) @@ -72,11 +73,12 @@ def create(self, request, *args, **kwargs): if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): try: - pkhash = data['message'].replace('"', '').split('-')[ - -1].strip() + pkhash = data['message'].replace(')" ', '').split('tkey: ')[-1].strip() - if not len(pkhash) == 64: + if len(pkhash) != 64: raise Exception('bad pkhash') + else: + st = status.HTTP_409_CONFLICT return Response({'message': data['message'], 'pkhash': pkhash}, status=st) diff --git a/substrabac/substrapp/views/traintuple.py b/substrabac/substrapp/views/traintuple.py index 4750b26c9..de0a78baa 100644 --- a/substrabac/substrapp/views/traintuple.py +++ b/substrabac/substrapp/views/traintuple.py @@ -73,7 +73,9 @@ def create(self, request, *args, **kwargs): # Get traintuple pkhash of the proposal args = serializer.get_args(serializer.validated_data) data, st = queryLedger({'args': '{"Args":["createTraintuple", ' + args + ']}'}) - pkhash = bytes.fromhex(data.rstrip()).decode('utf-8') # fail in queryLedger because it's a string hash and not a json + + if st in (status.HTTP_200_OK, status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): + pkhash = bytes.fromhex(data.rstrip()).decode('utf-8') # fail in queryLedger because it's a string hash and not a json # create on ledger data, st = serializer.create(serializer.validated_data) @@ -84,10 +86,12 @@ def create(self, request, *args, **kwargs): if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): try: - pkhash = data['message'].replace('"', '').split('-')[-1].strip() + pkhash = data['message'].replace(')" ', '').split('tkey: ')[-1].strip() - if not len(pkhash) == 64: + if len(pkhash) != 64: raise Exception('bad pkhash') + else: + st = status.HTTP_409_CONFLICT return Response({'message': data['message'], 'pkhash': pkhash}, status=st) From 194c40b3ccdb6853135ff4a945c3db87a0529eab Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Fri, 19 Apr 2019 09:31:57 +0200 Subject: [PATCH 021/106] Improve ledger call management. 
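The proposal is now run through queryLedger before the invoke, so a failing proposal can be turned into a proper response straight away. When the chaincode reports that the tuple already exists, its pkhash is recovered from the error message and a 409 is returned. A minimal sketch of that recovery, assuming the error message carries a 'tkey: <hash>' marker as in the current chaincode responses:

    def extract_existing_pkhash(message):
        # keep only what follows the 'tkey:' marker and drop the trailing ')"'
        pkhash = message.replace(')" ', '').split('tkey: ')[-1].strip()
        if len(pkhash) != 64:
            # not a conflict message after all
            raise Exception('bad pkhash')
        # a valid hash means the tuple already exists, hence HTTP 409
        return pkhash
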
--- substrabac/substrapp/views/testtuple.py | 18 ++++++++++++++++-- substrabac/substrapp/views/traintuple.py | 19 ++++++++++++++++--- 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/substrabac/substrapp/views/testtuple.py b/substrabac/substrapp/views/testtuple.py index 0604ea126..95fcb0fcc 100644 --- a/substrabac/substrapp/views/testtuple.py +++ b/substrabac/substrapp/views/testtuple.py @@ -58,11 +58,25 @@ def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=data) serializer.is_valid(raise_exception=True) - # Get testtuple pkhash of the proposal + # Get traintuple pkhash of the proposal with a queryLedger in case of 408 timeout args = serializer.get_args(serializer.validated_data) data, st = queryLedger({'args': '{"Args":["createTesttuple", ' + args + ']}'}) - if st in (status.HTTP_200_OK, status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): + if st == status.HTTP_200_OK: pkhash = bytes.fromhex(data.rstrip()).decode('utf-8') # fail in queryLedger because it's a string hash and not a json + else: + # If queryLedger fails, invoke will fail too so we handle the issue right now + try: + pkhash = data['message'].replace(')" ', '').split('tkey: ')[-1].strip() + + if len(pkhash) != 64: + raise Exception('bad pkhash') + else: + st = status.HTTP_409_CONFLICT + + return Response({'message': data['message'], + 'pkhash': pkhash}, status=st) + except: + return Response(data, status=st) # create on ledger data, st = serializer.create(serializer.validated_data) diff --git a/substrabac/substrapp/views/traintuple.py b/substrabac/substrapp/views/traintuple.py index de0a78baa..a08ad8749 100644 --- a/substrabac/substrapp/views/traintuple.py +++ b/substrabac/substrapp/views/traintuple.py @@ -70,12 +70,25 @@ def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=data) serializer.is_valid(raise_exception=True) - # Get traintuple pkhash of the proposal + # Get traintuple pkhash of the proposal with a queryLedger in case of 408 timeout args = serializer.get_args(serializer.validated_data) data, st = queryLedger({'args': '{"Args":["createTraintuple", ' + args + ']}'}) - - if st in (status.HTTP_200_OK, status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): + if st == status.HTTP_200_OK: pkhash = bytes.fromhex(data.rstrip()).decode('utf-8') # fail in queryLedger because it's a string hash and not a json + else: + # If queryLedger fails, invoke will fail too so we handle the issue right now + try: + pkhash = data['message'].replace(')" ', '').split('tkey: ')[-1].strip() + + if len(pkhash) != 64: + raise Exception('bad pkhash') + else: + st = status.HTTP_409_CONFLICT + + return Response({'message': data['message'], + 'pkhash': pkhash}, status=st) + except: + return Response(data, status=st) # create on ledger data, st = serializer.create(serializer.validated_data) From 234faddc8c2b97e0a96d353c8ccf52a885e67660 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Fri, 19 Apr 2019 14:37:35 +0200 Subject: [PATCH 022/106] Fix naming. 
--- substrabac/substrapp/views/testtuple.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrabac/substrapp/views/testtuple.py b/substrabac/substrapp/views/testtuple.py index 95fcb0fcc..e24892fb4 100644 --- a/substrabac/substrapp/views/testtuple.py +++ b/substrabac/substrapp/views/testtuple.py @@ -58,7 +58,7 @@ def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=data) serializer.is_valid(raise_exception=True) - # Get traintuple pkhash of the proposal with a queryLedger in case of 408 timeout + # Get testtuple pkhash of the proposal with a queryLedger in case of 408 timeout args = serializer.get_args(serializer.validated_data) data, st = queryLedger({'args': '{"Args":["createTesttuple", ' + args + ']}'}) if st == status.HTTP_200_OK: From 537f26fc35922ead9b79c64fe513534a85db5c13 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 18 Apr 2019 09:40:32 +0200 Subject: [PATCH 023/106] Handle json response from chaincode --- substrabac/substrapp/tests/tests_query.py | 2 +- substrabac/substrapp/utils.py | 4 ++++ substrabac/substrapp/views/testtuple.py | 4 +--- substrabac/substrapp/views/traintuple.py | 7 ++----- 4 files changed, 8 insertions(+), 9 deletions(-) diff --git a/substrabac/substrapp/tests/tests_query.py b/substrabac/substrapp/tests/tests_query.py index 1f24f02f5..35a9d763c 100644 --- a/substrabac/substrapp/tests/tests_query.py +++ b/substrabac/substrapp/tests/tests_query.py @@ -1116,7 +1116,7 @@ def test_add_traintuple_ok(self): mock.patch('substrapp.views.traintuple.queryLedger') as mqueryLedger: raw_pkhash = 'traintuple_pkhash'.encode('utf-8').hex() - mqueryLedger.return_value = (raw_pkhash, status.HTTP_200_OK) + mqueryLedger.return_value = ({'key': raw_pkhash}, status.HTTP_200_OK) mcreate.return_value = {'message': 'Traintuple added in local db waiting for validation. 
\ The substra network has been notified for adding this Traintuple'}, status.HTTP_202_ACCEPTED diff --git a/substrabac/substrapp/utils.py b/substrabac/substrapp/utils.py index cc1300a9c..098f3876f 100644 --- a/substrabac/substrapp/utils.py +++ b/substrabac/substrapp/utils.py @@ -145,6 +145,10 @@ def invokeLedger(options, sync=False): msg = msg.split('result: status:')[1].split('\n')[0].split('payload:')[1].strip().strip('"') except: pass + else: + msg = json.loads(msg.encode('utf-8').decode('unicode_escape')) + if isinstance(msg, dict): + msg = msg.get('key', msg.get('keys')) finally: data = {'pkhash': msg} diff --git a/substrabac/substrapp/views/testtuple.py b/substrabac/substrapp/views/testtuple.py index e24892fb4..ad0036765 100644 --- a/substrabac/substrapp/views/testtuple.py +++ b/substrabac/substrapp/views/testtuple.py @@ -1,5 +1,3 @@ -import hashlib - from rest_framework import mixins, status from rest_framework.response import Response from rest_framework.viewsets import GenericViewSet @@ -62,7 +60,7 @@ def create(self, request, *args, **kwargs): args = serializer.get_args(serializer.validated_data) data, st = queryLedger({'args': '{"Args":["createTesttuple", ' + args + ']}'}) if st == status.HTTP_200_OK: - pkhash = bytes.fromhex(data.rstrip()).decode('utf-8') # fail in queryLedger because it's a string hash and not a json + pkhash = data.get('key', data.get('keys')) else: # If queryLedger fails, invoke will fail too so we handle the issue right now try: diff --git a/substrabac/substrapp/views/traintuple.py b/substrabac/substrapp/views/traintuple.py index a08ad8749..9ada05623 100644 --- a/substrabac/substrapp/views/traintuple.py +++ b/substrabac/substrapp/views/traintuple.py @@ -1,11 +1,7 @@ -import hashlib - -from django.conf import settings from rest_framework import mixins, status from rest_framework.response import Response from rest_framework.viewsets import GenericViewSet - from substrapp.serializers import LedgerTrainTupleSerializer from substrapp.utils import queryLedger from substrapp.views.utils import JsonException @@ -74,7 +70,7 @@ def create(self, request, *args, **kwargs): args = serializer.get_args(serializer.validated_data) data, st = queryLedger({'args': '{"Args":["createTraintuple", ' + args + ']}'}) if st == status.HTTP_200_OK: - pkhash = bytes.fromhex(data.rstrip()).decode('utf-8') # fail in queryLedger because it's a string hash and not a json + pkhash = data.get('key', data.get('keys')) else: # If queryLedger fails, invoke will fail too so we handle the issue right now try: @@ -89,6 +85,7 @@ def create(self, request, *args, **kwargs): 'pkhash': pkhash}, status=st) except: return Response(data, status=st) + pkhash = data.get('key', data.get('keys')) # create on ledger data, st = serializer.create(serializer.validated_data) From b91e3ccea2ddcd954fae81cd7c43afb609324843 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 18 Apr 2019 09:42:59 +0200 Subject: [PATCH 024/106] Remove unecessary check --- substrabac/substrapp/utils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/substrabac/substrapp/utils.py b/substrabac/substrapp/utils.py index 098f3876f..268f8e9f2 100644 --- a/substrabac/substrapp/utils.py +++ b/substrabac/substrapp/utils.py @@ -147,8 +147,7 @@ def invokeLedger(options, sync=False): pass else: msg = json.loads(msg.encode('utf-8').decode('unicode_escape')) - if isinstance(msg, dict): - msg = msg.get('key', msg.get('keys')) + msg = msg.get('key', msg.get('keys')) # get pkhash finally: data = {'pkhash': msg} From 
14492c81e49b7d1f9e73415fa51d3d27e60faa10 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Fri, 19 Apr 2019 14:46:58 +0200 Subject: [PATCH 025/106] Fix rebase. --- substrabac/substrapp/views/traintuple.py | 1 - 1 file changed, 1 deletion(-) diff --git a/substrabac/substrapp/views/traintuple.py b/substrabac/substrapp/views/traintuple.py index 9ada05623..3aec70b18 100644 --- a/substrabac/substrapp/views/traintuple.py +++ b/substrabac/substrapp/views/traintuple.py @@ -85,7 +85,6 @@ def create(self, request, *args, **kwargs): 'pkhash': pkhash}, status=st) except: return Response(data, status=st) - pkhash = data.get('key', data.get('keys')) # create on ledger data, st = serializer.create(serializer.validated_data) From 76b6579a084a564867ff8e7f088a231e69dfef16 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Fri, 12 Apr 2019 09:51:20 +0200 Subject: [PATCH 026/106] Add tag on traintuple and testtuple. --- substrabac/populate.py | 1 + .../substrapp/serializers/ledger/testtuple/serializer.py | 5 ++++- .../substrapp/serializers/ledger/traintuple/serializer.py | 5 ++++- substrabac/substrapp/views/testtuple.py | 2 ++ substrabac/substrapp/views/traintuple.py | 2 ++ 5 files changed, 13 insertions(+), 2 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index b8d8de577..91b27874b 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -257,6 +257,7 @@ def update_datamanager(data_manager_key, data, profile): 'algo_key': algo_key, 'data_manager_key': data_manager_org1_key, 'train_data_sample_keys': train_data_sample_keys, + 'tag': 'substra' } traintuple_key = create_asset(data, org_1, 'traintuple') diff --git a/substrabac/substrapp/serializers/ledger/testtuple/serializer.py b/substrabac/substrapp/serializers/ledger/testtuple/serializer.py index bb37825c3..c9f5ccbad 100644 --- a/substrabac/substrapp/serializers/ledger/testtuple/serializer.py +++ b/substrabac/substrapp/serializers/ledger/testtuple/serializer.py @@ -12,16 +12,19 @@ class LedgerTestTupleSerializer(serializers.Serializer): test_data_sample_keys = serializers.ListField(child=serializers.CharField(min_length=64, max_length=64), min_length=0, required=False) + tag = serializers.CharField(min_length=0, max_length=64, allow_blank=True, required=False) def get_args(self, validated_data): traintuple_key = validated_data.get('traintuple_key') data_manager_key = validated_data.get('data_manager_key', '') test_data_sample_keys = validated_data.get('test_data_sample_keys', []) + tag = validated_data.get('tag', '') - args = '"%(traintupleKey)s", "%(dataManagerKey)s", "%(dataSampleKeys)s"' % { + args = '"%(traintupleKey)s", "%(dataManagerKey)s", "%(dataSampleKeys)s", "%(tag)s"' % { 'traintupleKey': traintuple_key, 'dataManagerKey': data_manager_key, 'dataSampleKeys': ','.join(test_data_sample_keys), + 'tag': tag } return args diff --git a/substrabac/substrapp/serializers/ledger/traintuple/serializer.py b/substrabac/substrapp/serializers/ledger/traintuple/serializer.py index 2ca52ff82..ec8401a26 100644 --- a/substrabac/substrapp/serializers/ledger/traintuple/serializer.py +++ b/substrabac/substrapp/serializers/ledger/traintuple/serializer.py @@ -16,6 +16,7 @@ class LedgerTrainTupleSerializer(serializers.Serializer): required=False) train_data_sample_keys = serializers.ListField(child=serializers.CharField(min_length=64, max_length=64), min_length=1) + tag = serializers.CharField(min_length=0, max_length=64, allow_blank=True, required=False) def get_args(self, validated_data): algo_key = validated_data.get('algo_key') @@ 
-25,14 +26,16 @@ def get_args(self, validated_data): FLtask_key = validated_data.get('FLtask_key', '') train_data_sample_keys = validated_data.get('train_data_sample_keys', []) in_models_keys = validated_data.get('in_models_keys') + tag = validated_data.get('tag', '') - args = '"%(algoKey)s", "%(inModels)s", "%(dataManagerKey)s", "%(dataSampleKeys)s", "%(FLtask)s", "%(rank)s"' % { + args = '"%(algoKey)s", "%(inModels)s", "%(dataManagerKey)s", "%(dataSampleKeys)s", "%(FLtask)s", "%(rank)s", "%(tag)s"' % { 'algoKey': algo_key, 'rank': rank, 'FLtask': FLtask_key, 'inModels': ','.join(in_models_keys), 'dataManagerKey': data_manager_key, 'dataSampleKeys': ','.join(train_data_sample_keys), + 'tag': tag } return args diff --git a/substrabac/substrapp/views/testtuple.py b/substrabac/substrapp/views/testtuple.py index ad0036765..383ef02e7 100644 --- a/substrabac/substrapp/views/testtuple.py +++ b/substrabac/substrapp/views/testtuple.py @@ -40,6 +40,7 @@ def create(self, request, *args, **kwargs): traintuple_key = request.data.get('traintuple_key', request.POST.get('traintuple_key', None)) data_manager_key = request.data.get('data_manager_key', request.POST.get('data_manager_key', '')) + tag = request.data.get('tag', request.POST.get('tag', '')) try: test_data_sample_keys = request.data.getlist('test_data_sample_keys', []) @@ -50,6 +51,7 @@ def create(self, request, *args, **kwargs): 'traintuple_key': traintuple_key, 'data_manager_key': data_manager_key, 'test_data_sample_keys': test_data_sample_keys, # list of test data keys + 'tag': tag } # init ledger serializer diff --git a/substrabac/substrapp/views/traintuple.py b/substrabac/substrapp/views/traintuple.py index 3aec70b18..38bfd09a0 100644 --- a/substrabac/substrapp/views/traintuple.py +++ b/substrabac/substrapp/views/traintuple.py @@ -42,6 +42,7 @@ def create(self, request, *args, **kwargs): data_manager_key = request.data.get('data_manager_key', request.POST.get('data_manager_key', None)) rank = request.data.get('rank', request.POST.get('rank', None)) FLtask_key = request.data.get('FLtask_key', request.POST.get('FLtask_key', '')) + tag = request.data.get('tag', request.POST.get('tag', '')) try: in_models_keys = request.data.getlist('in_models_keys', []) @@ -60,6 +61,7 @@ def create(self, request, *args, **kwargs): 'FLtask_key': FLtask_key, 'in_models_keys': in_models_keys, 'train_data_sample_keys': train_data_sample_keys, # list of train data keys (which are stored in the train worker node) + 'tag': tag } # init ledger serializer From f6259f63a60ece8f29b5b80e134deeea7277fee3 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 15 Apr 2019 09:53:46 +0200 Subject: [PATCH 027/106] Update objective view with non mandataory test data manager and test data sample(s). 
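test_data_manager_key and test_data_sample_keys are no longer mandatory when registering an objective, and the view now reads the sample keys from both form-data (getlist) and JSON payloads. A minimal sketch of an objective payload without any test dataset, modelled on the populate.py example added later in this series (fixture paths are illustrative):

    data = {
        'name': 'Skin Lesion Classification Objective',
        'description': os.path.join(dir_path, './fixtures/owkin/objectives/objective0/description.md'),
        'metrics_name': 'macro-average recall',
        'metrics': os.path.join(dir_path, './fixtures/owkin/objectives/objective0/metrics.py'),
        'permissions': 'all',
    }
    # no test_data_manager_key / test_data_sample_keys here; note that a
    # dry-run still requires a data manager key and will raise without one
    objective_key = create_asset(data, org_0, 'objective')
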
--- .../serializers/ledger/objective/serializer.py | 7 ++++--- substrabac/substrapp/views/objective.py | 12 ++++++++++-- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/substrabac/substrapp/serializers/ledger/objective/serializer.py b/substrabac/substrapp/serializers/ledger/objective/serializer.py index 7376ef03d..372c336c1 100644 --- a/substrabac/substrapp/serializers/ledger/objective/serializer.py +++ b/substrabac/substrapp/serializers/ledger/objective/serializer.py @@ -10,9 +10,10 @@ class LedgerObjectiveSerializer(serializers.Serializer): test_data_sample_keys = serializers.ListField(child=serializers.CharField(min_length=64, max_length=64), - min_length=1) + min_length=0, + required=False) name = serializers.CharField(min_length=1, max_length=100) - test_data_manager_key = serializers.CharField(max_length=256) + test_data_manager_key = serializers.CharField(max_length=256, allow_blank=True, required=False) permissions = serializers.CharField(min_length=1, max_length=60) metrics_name = serializers.CharField(min_length=1, max_length=100) @@ -21,7 +22,7 @@ def create(self, validated_data): name = validated_data.get('name') metrics_name = validated_data.get('metrics_name') permissions = validated_data.get('permissions') - test_data_manager_key = validated_data.get('test_data_manager_key') + test_data_manager_key = validated_data.get('test_data_manager_key', '') test_data_sample_keys = validated_data.get('test_data_sample_keys', []) # TODO, create a datamigration with new Site domain name when we will know the name of the final website diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index 86947157e..b59be6062 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -37,6 +37,9 @@ def compute_dryrun(self, metrics_path, test_data_manager_key, pkhash): shutil.copy2(metrics_path, os.path.join(subtuple_directory, 'metrics/metrics.py')) os.remove(metrics_path) + if not test_data_manager_key: + raise Exception('Cannot do a objective dryrun without a data manager key.') + datamanager = getObjectFromLedger(test_data_manager_key, 'queryDataManager') opener_content, opener_computed_hash = get_computed_hash(datamanager['opener']['storageAddress']) with open(os.path.join(subtuple_directory, 'opener/opener.py'), 'wb') as opener_file: @@ -131,8 +134,13 @@ def create(self, request, *args, **kwargs): dryrun = data.get('dryrun', False) description = data.get('description') - test_data_manager_key = data.get('test_data_manager_key') - test_data_sample_keys = data.getlist('test_data_sample_keys') + test_data_manager_key = request.data.get('test_data_manager_key', request.POST.get('test_data_manager_key', '')) + + try: + test_data_sample_keys = request.data.getlist('test_data_sample_keys', []) + except: + test_data_sample_keys = request.data.get('test_data_sample_keys', request.POST.getlist('test_data_sample_keys', [])) + metrics = data.get('metrics') pkhash = get_hash(description) From 4927fa6ff08eec68364fd5c1b375a57cd1f66ed7 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 15 Apr 2019 10:53:37 +0200 Subject: [PATCH 028/106] Move objective link from algo to traintuple. 
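The objective is no longer attached to the algo at registration time: it is passed when the traintuple is created, and the algo dry-run (which depended on the algo's objective) goes away. A minimal sketch of the new traintuple payload, mirroring the populate.py update made later in this series (the keys are whatever create_asset returned earlier in the script):

    data = {
        'algo_key': algo_key,
        'objective_key': objective_key,   # moved here from the algo
        'data_manager_key': data_manager_org1_key,
        'train_data_sample_keys': train_data_sample_keys,
        'tag': 'substra',
    }
    traintuple_key = create_asset(data, org_1, 'traintuple')
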
--- .../serializers/ledger/algo/serializer.py | 5 +- .../ledger/traintuple/serializer.py | 5 +- substrabac/substrapp/views/algo.py | 157 +----------------- substrabac/substrapp/views/traintuple.py | 2 + 4 files changed, 10 insertions(+), 159 deletions(-) diff --git a/substrabac/substrapp/serializers/ledger/algo/serializer.py b/substrabac/substrapp/serializers/ledger/algo/serializer.py index d04cb9442..e6d53ed1c 100644 --- a/substrabac/substrapp/serializers/ledger/algo/serializer.py +++ b/substrabac/substrapp/serializers/ledger/algo/serializer.py @@ -10,14 +10,12 @@ class LedgerAlgoSerializer(serializers.Serializer): name = serializers.CharField(min_length=1, max_length=100) - objective_key = serializers.CharField(min_length=64, max_length=64) permissions = serializers.CharField(min_length=1, max_length=60) def create(self, validated_data): instance = self.initial_data.get('instance') name = validated_data.get('name') permissions = validated_data.get('permissions') - objective_key = validated_data.get('objective_key') # TODO, create a datamigration with new Site domain name when we will know the name of the final website # current_site = Site.objects.get_current() @@ -25,13 +23,12 @@ def create(self, validated_data): protocol = 'https://' if request.is_secure() else 'http://' host = '' if request is None else request.get_host() - args = '"%(name)s", "%(algoHash)s", "%(storageAddress)s", "%(descriptionHash)s", "%(descriptionStorageAddress)s", "%(associatedObjective)s", "%(permissions)s"' % { + args = '"%(name)s", "%(algoHash)s", "%(storageAddress)s", "%(descriptionHash)s", "%(descriptionStorageAddress)s", "%(permissions)s"' % { 'name': name, 'algoHash': get_hash(instance.file), 'storageAddress': protocol + host + reverse('substrapp:algo-file', args=[instance.pk]), 'descriptionHash': get_hash(instance.description), 'descriptionStorageAddress': protocol + host + reverse('substrapp:algo-description', args=[instance.pk]), - 'associatedObjective': objective_key, 'permissions': permissions } diff --git a/substrabac/substrapp/serializers/ledger/traintuple/serializer.py b/substrabac/substrapp/serializers/ledger/traintuple/serializer.py index ec8401a26..c731f1bd4 100644 --- a/substrabac/substrapp/serializers/ledger/traintuple/serializer.py +++ b/substrabac/substrapp/serializers/ledger/traintuple/serializer.py @@ -9,6 +9,7 @@ class LedgerTrainTupleSerializer(serializers.Serializer): algo_key = serializers.CharField(min_length=64, max_length=64) data_manager_key = serializers.CharField(min_length=64, max_length=64) + objective_key = serializers.CharField(min_length=64, max_length=64) rank = serializers.IntegerField(allow_null=True, required=False) FLtask_key = serializers.CharField(min_length=64, max_length=64, allow_blank=True, required=False) in_models_keys = serializers.ListField(child=serializers.CharField(min_length=64, max_length=64), @@ -21,6 +22,7 @@ class LedgerTrainTupleSerializer(serializers.Serializer): def get_args(self, validated_data): algo_key = validated_data.get('algo_key') data_manager_key = validated_data.get('data_manager_key') + objective_key = validated_data.get('objective_key') rank = validated_data.get('rank', '') rank = '' if rank is None else rank # rank should be an integer or empty string, not None FLtask_key = validated_data.get('FLtask_key', '') @@ -28,13 +30,14 @@ def get_args(self, validated_data): in_models_keys = validated_data.get('in_models_keys') tag = validated_data.get('tag', '') - args = '"%(algoKey)s", "%(inModels)s", "%(dataManagerKey)s", "%(dataSampleKeys)s", 
"%(FLtask)s", "%(rank)s", "%(tag)s"' % { + args = '"%(algoKey)s", "%(inModels)s", "%(dataManagerKey)s", "%(dataSampleKeys)s", "%(FLtask)s", "%(rank)s", "%(associatedObjective)s", "%(tag)s"' % { 'algoKey': algo_key, 'rank': rank, 'FLtask': FLtask_key, 'inModels': ','.join(in_models_keys), 'dataManagerKey': data_manager_key, 'dataSampleKeys': ','.join(train_data_sample_keys), + 'associatedObjective': objective_key, 'tag': tag } diff --git a/substrabac/substrapp/views/algo.py b/substrabac/substrapp/views/algo.py index b8fc01df4..1cdd0426c 100644 --- a/substrabac/substrapp/views/algo.py +++ b/substrabac/substrapp/views/algo.py @@ -1,109 +1,18 @@ -import docker -import os import tempfile import requests -from django.conf import settings from django.http import Http404 -from docker.errors import ContainerError from rest_framework import status, mixins from rest_framework.decorators import action from rest_framework.exceptions import ValidationError from rest_framework.response import Response from rest_framework.viewsets import GenericViewSet -from rest_framework.reverse import reverse - - -from substrabac.celery import app from substrapp.models import Algo from substrapp.serializers import LedgerAlgoSerializer, AlgoSerializer -from substrapp.utils import queryLedger, get_hash, get_computed_hash, \ - uncompress_path +from substrapp.utils import queryLedger, get_hash from substrapp.views.utils import get_filters, getObjectFromLedger, ComputeHashMixin, ManageFileMixin, JsonException, find_primary_key_error -from substrapp.tasks import build_subtuple_folders, remove_subtuple_materials - - -@app.task(bind=True, ignore_result=False) -def compute_dryrun(self, algo_path, objective_key, pkhash): - - try: - subtuple_directory = build_subtuple_folders({'key': pkhash}) - - uncompress_path(algo_path, subtuple_directory) - os.remove(algo_path) - - try: - objective = getObjectFromLedger(objective_key, 'queryObjective') - except JsonException as e: - raise e - else: - metrics_content, metrics_computed_hash = get_computed_hash(objective['metrics']['storageAddress']) - with open(os.path.join(subtuple_directory, 'metrics/metrics.py'), 'wb') as metrics_file: - metrics_file.write(metrics_content) - datamanager_key = objective['testDataset']['dataManagerKey'] - - try: - datamanager = getObjectFromLedger(datamanager_key, 'queryDataManager') - except JsonException as e: - raise e - else: - opener_content, opener_computed_hash = get_computed_hash(datamanager['opener']['storageAddress']) - with open(os.path.join(subtuple_directory, 'opener/opener.py'), 'wb') as opener_file: - opener_file.write(opener_content) - - # Launch verification - client = docker.from_env() - opener_file = os.path.join(subtuple_directory, 'opener/opener.py') - metrics_file = os.path.join(subtuple_directory, 'metrics/metrics.py') - pred_path = os.path.join(subtuple_directory, 'pred') - model_path = os.path.join(subtuple_directory, 'model') - - algo_docker = 'algo_dry_run' # tag must be lowercase for docker - algo_docker_name = f'{algo_docker}_{pkhash}' - algo_path = subtuple_directory - volumes = {pred_path: {'bind': '/sandbox/pred', 'mode': 'rw'}, - metrics_file: {'bind': '/sandbox/metrics/__init__.py', 'mode': 'ro'}, - opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}, - model_path: {'bind': '/sandbox/model', 'mode': 'rw'}} - - dockerfile_path = os.path.join(algo_path, 'Dockerfile') - if not os.path.exists(dockerfile_path): - raise Exception('Missing dockerfile in the algo archive.') - - client.images.build(path=algo_path, - 
tag=algo_docker, - rm=True) - - job_args = {'image': algo_docker, - 'name': algo_docker_name, - 'cpuset_cpus': '0-1', - 'mem_limit': '1G', - 'command': '--dry-run', - 'volumes': volumes, - 'shm_size': '8G', - 'labels': ['dryrun'], - 'detach': False, - 'auto_remove': False, - 'remove': False} - - client.containers.run(**job_args) - - except ContainerError as e: - raise Exception(e.stderr) - except Exception as e: - raise str(e) - finally: - try: - container = client.containers.get(algo_docker_name) - container.remove() - client.images.remove(algo_docker, force=True) - except: - pass - remove_subtuple_materials(subtuple_directory) - if os.path.exists(algo_path): - os.remove(algo_path) class AlgoViewSet(mixins.CreateModelMixin, @@ -122,10 +31,7 @@ def perform_create(self, serializer): def create(self, request, *args, **kwargs): data = request.data - dryrun = data.get('dryrun', False) - file = data.get('file') - objective_key = data.get('objective_key') pkhash = get_hash(file) serializer = self.get_serializer(data={ 'pkhash': pkhash, @@ -142,26 +48,6 @@ def create(self, request, *args, **kwargs): return Response({'message': e.args, 'pkhash': pkhash}, status=st) else: - if dryrun: - try: - algo_path = os.path.join(getattr(settings, 'DRYRUN_ROOT'), f'algo_{pkhash}.tar.gz') - with open(algo_path, 'wb') as algo_file: - algo_file.write(file.open().read()) - - task = compute_dryrun.apply_async((algo_path, objective_key, pkhash), queue=f"{settings.LEDGER['name']}.dryrunner") - url_http = 'http' if settings.DEBUG else 'https' - site_port = getattr(settings, "SITE_PORT", None) - current_site = f'{getattr(settings, "SITE_HOST")}' - if site_port: - current_site = f'{current_site}:{site_port}' - task_route = f'{url_http}://{current_site}{reverse("substrapp:task-detail", args=[task.id])}' - msg = f'Your dry-run has been taken in account. 
You can follow the task execution on {task_route}' - except Exception as e: - return Response({'message': f'Could not launch algo creation with dry-run on this instance: {str(e)}'}, - status=status.HTTP_400_BAD_REQUEST) - else: - return Response({'id': task.id, 'message': msg}, status=status.HTTP_202_ACCEPTED) - # create on db try: instance = self.perform_create(serializer) @@ -172,7 +58,6 @@ def create(self, request, *args, **kwargs): # init ledger serializer ledger_serializer = LedgerAlgoSerializer(data={'name': data.get('name'), 'permissions': data.get('permissions', 'all'), - 'objective_key': objective_key, 'instance': instance}, context={'request': request}) if not ledger_serializer.is_valid(): @@ -193,7 +78,7 @@ def create(self, request, *args, **kwargs): def create_or_update_algo(self, algo, pk): try: - # get objective description from remote node + # get algo description from remote node url = algo['description']['storageAddress'] try: r = requests.get(url, headers={'Accept': 'application/json;version=0.0'}) # TODO pass cert @@ -277,8 +162,7 @@ def list(self, request, *args, **kwargs): data, st = queryLedger({ 'args': '{"Args":["queryAlgos"]}' }) - objectiveData = None - datamanagerData = None + modelData = None # init list to return @@ -308,41 +192,6 @@ def list(self, request, *args, **kwargs): if k == 'algo': # filter by own key for key, val in subfilters.items(): l[idx] = [x for x in l[idx] if x[key] in val] - elif k == 'objective': # select objective used by these datamanagers - st = None - if not objectiveData: - # TODO find a way to put this call in cache - objectiveData, st = queryLedger({ - 'args': '{"Args":["queryObjectives"]}' - }) - - if st != status.HTTP_200_OK: - return Response(objectiveData, status=st) - if objectiveData is None: - objectiveData = [] - - for key, val in subfilters.items(): - if key == 'metrics': # specific to nested metrics - filteredData = [x for x in objectiveData if x[key]['name'] in val] - else: - filteredData = [x for x in objectiveData if x[key] in val] - objectiveKeys = [x['key'] for x in filteredData] - l[idx] = [x for x in l[idx] if x['objectiveKey'] in objectiveKeys] - elif k == 'dataset': # select objective used by these algo - if not datamanagerData: - # TODO find a way to put this call in cache - datamanagerData, st = queryLedger({ - 'args': '{"Args":["queryDataManagers"]}' - }) - if st != status.HTTP_200_OK: - return Response(datamanagerData, status=st) - if datamanagerData is None: - datamanagerData = [] - - for key, val in subfilters.items(): - filteredData = [x for x in datamanagerData if x[key] in val] - objectiveKeys = [x['objectiveKey'] for x in filteredData] - l[idx] = [x for x in l[idx] if x['objectiveKey'] in objectiveKeys] elif k == 'model': # select objectives used by outModel hash if not modelData: # TODO find a way to put this call in cache diff --git a/substrabac/substrapp/views/traintuple.py b/substrabac/substrapp/views/traintuple.py index 38bfd09a0..478e291eb 100644 --- a/substrabac/substrapp/views/traintuple.py +++ b/substrabac/substrapp/views/traintuple.py @@ -40,6 +40,7 @@ def create(self, request, *args, **kwargs): algo_key = request.data.get('algo_key', request.POST.get('algo_key', None)) data_manager_key = request.data.get('data_manager_key', request.POST.get('data_manager_key', None)) + objective_key = request.data.get('objective_key', request.POST.get('objective_key', None)) rank = request.data.get('rank', request.POST.get('rank', None)) FLtask_key = request.data.get('FLtask_key', request.POST.get('FLtask_key', 
'')) tag = request.data.get('tag', request.POST.get('tag', '')) @@ -57,6 +58,7 @@ def create(self, request, *args, **kwargs): data = { 'algo_key': algo_key, 'data_manager_key': data_manager_key, + 'objective_key': objective_key, 'rank': rank, 'FLtask_key': FLtask_key, 'in_models_keys': in_models_keys, From 9f471a5fd5c5dbfb3aa61f58ab88b453fcd22925 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 18 Apr 2019 10:40:25 +0200 Subject: [PATCH 029/106] Fix tests. --- substrabac/substrapp/tests/tests_query.py | 3 ++ substrabac/substrapp/tests/tests_views.py | 59 ----------------------- 2 files changed, 3 insertions(+), 59 deletions(-) diff --git a/substrabac/substrapp/tests/tests_query.py b/substrabac/substrapp/tests/tests_query.py index 35a9d763c..b02deaadb 100644 --- a/substrabac/substrapp/tests/tests_query.py +++ b/substrabac/substrapp/tests/tests_query.py @@ -1104,6 +1104,7 @@ def test_add_traintuple_ok(self): '5c1d9cd1c2c1082dde0921b56d11030c81f62fbb51932758b58ac2569dd0b422'], 'algo_key': '5c1d9cd1c2c1082dde0921b56d11030c81f62fbb51932758b58ac2569dd0a088', 'data_manager_key': '5c1d9cd1c2c1082dde0921b56d11030c81f62fbb51932758b58ac2569dd0a088', + 'objective_key': '5c1d9cd1c2c1082dde0921b56d11030c81f62fbb51932758b58ac2569dd0a088', 'rank': -1, 'FLtask_key': '5c1d9cd1c2c1082dde0921b56d11030c81f62fbb51932758b58ac2569dd0a088', 'in_models_keys': [ @@ -1121,6 +1122,8 @@ def test_add_traintuple_ok(self): The substra network has been notified for adding this Traintuple'}, status.HTTP_202_ACCEPTED response = self.client.post(url, data, format='multipart', **extra) + + print(response.json()) self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED) def test_add_traintuple_ko(self): diff --git a/substrabac/substrapp/tests/tests_views.py b/substrabac/substrapp/tests/tests_views.py index 289fc2db1..5e46e633e 100644 --- a/substrabac/substrapp/tests/tests_views.py +++ b/substrabac/substrapp/tests/tests_views.py @@ -17,7 +17,6 @@ from substrapp.views.utils import JsonException, ComputeHashMixin, getObjectFromLedger from substrapp.views.datasample import path_leaf, compute_dryrun as data_sample_compute_dryrun from substrapp.views.objective import compute_dryrun as objective_compute_dryrun -from substrapp.views.algo import compute_dryrun as algo_compute_dryrun from substrapp.utils import compute_hash, get_hash from substrapp.models import DataManager @@ -499,64 +498,6 @@ def test_algo_create(self): data['description'].close() data['file'].close() - def test_algo_create_dryrun(self): - - url = reverse('substrapp:algo-list') - - dir_path = os.path.dirname(os.path.realpath(__file__)) - - algo_path = os.path.join(dir_path, '../../fixtures/chunantes/algos/algo3/algo.tar.gz') - description_path = os.path.join(dir_path, '../../fixtures/chunantes/algos/algo3/description.md') - - data = {'name': 'Logistic regression', - 'file': open(algo_path, 'rb'), - 'description': open(description_path, 'rb'), - 'objective_key': get_hash(os.path.join(dir_path, '../../fixtures/chunantes/objectives/objective0/description.md')), - 'permissions': 'all', - 'dryrun': True} - - with mock.patch('substrapp.views.algo.compute_dryrun.apply_async') as mdryrun_task: - - mdryrun_task.return_value = FakeTask('42') - response = self.client.post(url, data=data, format='multipart', **self.extra) - - self.assertEqual(response.data['id'], '42') - self.assertEqual(response.data['message'], 'Your dry-run has been taken in account. 
You can follow the task execution on https://localhost/task/42/') - self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED) - - data['description'].close() - data['file'].close() - - def test_algo_compute_dryrun(self): - - dir_path = os.path.dirname(os.path.realpath(__file__)) - - algo_path = os.path.join(dir_path, '../../fixtures/chunantes/algos/algo3/algo.tar.gz') - shutil.copy(algo_path, os.path.join(MEDIA_ROOT, 'algo.tar.gz')) - - metrics_path = os.path.join(dir_path, '../../fixtures/chunantes/objectives/objective0/metrics.py') - with open(metrics_path, 'rb') as f: - metrics_content = f.read() - metrics_pkhash = compute_hash(metrics_content) - - opener_path = os.path.join(dir_path, '../../fixtures/owkin/datamanagers/datamanager0/opener.py') - with open(opener_path, 'rb') as f: - opener_content = f.read() - opener_pkhash = compute_hash(opener_content) - - with mock.patch('substrapp.views.algo.getObjectFromLedger') as mgetObjectFromLedger,\ - mock.patch('substrapp.views.algo.get_computed_hash') as mget_computed_hash: - mgetObjectFromLedger.side_effect = [{'metrics': {'storageAddress': 'test'}, - 'testDataset': {'dataManagerKey': 'test'}}, - {'opener': {'storageAddress': 'test'}}] - mget_computed_hash.side_effect = [(metrics_content, metrics_pkhash), (opener_content, opener_pkhash)] - - objective_key = get_hash(os.path.join(dir_path, '../../fixtures/chunantes/objectives/objective0/description.md')) - pkhash = get_hash(algo_path) - - # Slow operation, about 45 s, will fail if no internet connection - algo_compute_dryrun(os.path.join(MEDIA_ROOT, 'algo.tar.gz'), objective_key, pkhash) - # APITestCase @override_settings(MEDIA_ROOT=MEDIA_ROOT) From d3101662a9e5156a7b6d16c056e98e663aa138ec Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 18 Apr 2019 11:37:11 +0200 Subject: [PATCH 030/106] Fix minor issues. 
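Mostly reorders the createTraintuple arguments so that the objective key follows the algo key, and updates populate.py accordingly (objective registered without a test dataset, objective_key and tags added to the traintuples). An illustrative rendering of the argument order now sent to the chaincode (the keys below are placeholders, not real hashes):

    args = '"<algo_key>", "<objective_key>", "<in_model_keys>", "<data_manager_key>", ' \
           '"<data_sample_key_1>,<data_sample_key_2>", "<fltask_key>", "<rank>", "<tag>"'
    queryLedger({'args': '{"Args":["createTraintuple", ' + args + ']}'})
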
--- substrabac/populate.py | 20 ++++++++++++++++--- .../ledger/traintuple/serializer.py | 8 ++++---- 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index 91b27874b..4bf4468e4 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -204,6 +204,19 @@ def update_datamanager(data_manager_key, data, profile): #################################################### + print('register objective without data manager and data sample') + data = { + 'name': 'Skin Lesion Classification Objective', + 'description': os.path.join(dir_path, './fixtures/owkin/objectives/objective0/description.md'), + 'metrics_name': 'macro-average recall', + 'metrics': os.path.join(dir_path, './fixtures/owkin/objectives/objective0/metrics.py'), + 'permissions': 'all' + } + + objective_key_test = create_asset(data, org_0, 'objective', True) + + #################################################### + # update datamanager print('update datamanager') data = { @@ -220,7 +233,6 @@ def update_datamanager(data_manager_key, data, profile): 'name': 'Logistic regression', 'file': os.path.join(dir_path, './fixtures/chunantes/algos/algo3/algo.tar.gz'), 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo3/description.md'), - 'objective_key': objective_key, 'permissions': 'all', } algo_key = create_asset(data, org_1, 'algo', True) @@ -232,7 +244,6 @@ def update_datamanager(data_manager_key, data, profile): 'name': 'Neural Network', 'file': os.path.join(dir_path, './fixtures/chunantes/algos/algo0/algo.tar.gz'), 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo0/description.md'), - 'objective_key': objective_key, 'permissions': 'all', } algo_key_2 = create_asset(data, org_1, 'algo', False) @@ -243,7 +254,6 @@ def update_datamanager(data_manager_key, data, profile): 'name': 'Random Forest', 'file': os.path.join(dir_path, './fixtures/chunantes/algos/algo4/algo.tar.gz'), 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo4/description.md'), - 'objective_key': objective_key, 'permissions': 'all', } algo_key_3 = create_asset(data, org_1, 'algo', False) @@ -255,6 +265,7 @@ def update_datamanager(data_manager_key, data, profile): print('create traintuple') data = { 'algo_key': algo_key, + 'objective_key': objective_key, 'data_manager_key': data_manager_org1_key, 'train_data_sample_keys': train_data_sample_keys, 'tag': 'substra' @@ -265,7 +276,9 @@ def update_datamanager(data_manager_key, data, profile): data = { 'algo_key': algo_key_2, 'data_manager_key': data_manager_org1_key, + 'objective_key': objective_key, 'train_data_sample_keys': train_data_sample_keys, + 'tag': 'My super tag' } traintuple_key_2 = create_asset(data, org_1, 'traintuple') @@ -274,6 +287,7 @@ def update_datamanager(data_manager_key, data, profile): data = { 'algo_key': algo_key_3, 'data_manager_key': data_manager_org1_key, + 'objective_key': objective_key, 'train_data_sample_keys': train_data_sample_keys, } diff --git a/substrabac/substrapp/serializers/ledger/traintuple/serializer.py b/substrabac/substrapp/serializers/ledger/traintuple/serializer.py index c731f1bd4..970a67ba8 100644 --- a/substrabac/substrapp/serializers/ledger/traintuple/serializer.py +++ b/substrabac/substrapp/serializers/ledger/traintuple/serializer.py @@ -30,14 +30,14 @@ def get_args(self, validated_data): in_models_keys = validated_data.get('in_models_keys') tag = validated_data.get('tag', '') - args = '"%(algoKey)s", "%(inModels)s", "%(dataManagerKey)s", "%(dataSampleKeys)s", 
"%(FLtask)s", "%(rank)s", "%(associatedObjective)s", "%(tag)s"' % { + args = '"%(algoKey)s", "%(associatedObjective)s", "%(inModels)s", "%(dataManagerKey)s", "%(dataSampleKeys)s", "%(FLtask)s", "%(rank)s", "%(tag)s"' % { 'algoKey': algo_key, - 'rank': rank, - 'FLtask': FLtask_key, + 'associatedObjective': objective_key, 'inModels': ','.join(in_models_keys), 'dataManagerKey': data_manager_key, 'dataSampleKeys': ','.join(train_data_sample_keys), - 'associatedObjective': objective_key, + 'FLtask': FLtask_key, + 'rank': rank, 'tag': tag } From 8d1a4d090ee4d335f14b69683b968f99d1673005 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 18 Apr 2019 11:49:43 +0200 Subject: [PATCH 031/106] Fix potential conflict on docker name --- substrabac/substrapp/tests/tests_tasks.py | 6 ++++-- substrabac/substrapp/views/datasample.py | 3 ++- substrabac/substrapp/views/objective.py | 3 ++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/substrabac/substrapp/tests/tests_tasks.py b/substrabac/substrapp/tests/tests_tasks.py index a2df3e6f3..c289be5ea 100644 --- a/substrabac/substrapp/tests/tests_tasks.py +++ b/substrabac/substrapp/tests/tests_tasks.py @@ -2,6 +2,7 @@ import shutil import mock import time +import uuid from django.test import override_settings from django.http import HttpResponse @@ -381,9 +382,10 @@ def test_compute_docker(self): with open(dockerfile_path, 'w') as f: f.write('FROM library/hello-world') + hash_docker = uuid.uuid4().hex result = compute_docker(client, self.ResourcesManager, - self.subtuple_path, 'test_compute_docker', - 'test_compute_docker_name', None, None) + self.subtuple_path, 'test_compute_docker_' + hash_docker, + 'test_compute_docker_name_' + hash_docker, None, None) self.assertIsNone(cpu_set) self.assertIsNone(gpu_set) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index 92e5fc02d..84d4b5202 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -1,6 +1,7 @@ import docker import os import ntpath +import uuid from django.conf import settings from docker.errors import ContainerError @@ -62,7 +63,7 @@ def compute_dryrun(self, data_sample_files, data_manager_keys): data_sample_docker_path = os.path.join(getattr(settings, 'PROJECT_ROOT'), 'fake_data_sample') # fake_data comes with substrabac data_docker = 'data_dry_run' # tag must be lowercase for docker - data_docker_name = f'{data_docker}_{pkhash}' + data_docker_name = f'{data_docker}_{pkhash}_{uuid.uuid4().hex}' data_path = os.path.join(subtuple_directory, 'data') volumes = {data_path: {'bind': '/sandbox/data', 'mode': 'rw'}, opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}} diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index b59be6062..df291bfbe 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -4,6 +4,7 @@ import re import shutil import tempfile +import uuid import requests from django.conf import settings @@ -53,7 +54,7 @@ def compute_dryrun(self, metrics_path, test_data_manager_key, pkhash): metrics_path = os.path.join(getattr(settings, 'PROJECT_ROOT'), 'fake_metrics') # base metrics comes with substrabac metrics_docker = 'metrics_dry_run' # tag must be lowercase for docker - metrics_docker_name = f'{metrics_docker}_{pkhash}' + metrics_docker_name = f'{metrics_docker}_{pkhash}_{uuid.uuid4().hex}' volumes = {pred_path: {'bind': '/sandbox/pred', 'mode': 'rw'}, metrics_file: {'bind': 
'/sandbox/metrics/__init__.py', 'mode': 'ro'}, opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}} From a49df81d57e27a3a1519b5cfb100ca319aca2f24 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 23 Apr 2019 10:12:01 +0200 Subject: [PATCH 032/106] Fix message in case of conflict. --- substrabac/substrapp/views/testtuple.py | 8 ++++++-- substrabac/substrapp/views/traintuple.py | 7 +++++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/substrabac/substrapp/views/testtuple.py b/substrabac/substrapp/views/testtuple.py index 383ef02e7..bdb0dfbec 100644 --- a/substrabac/substrapp/views/testtuple.py +++ b/substrabac/substrapp/views/testtuple.py @@ -1,3 +1,5 @@ +import json + from rest_framework import mixins, status from rest_framework.response import Response from rest_framework.viewsets import GenericViewSet @@ -66,7 +68,8 @@ def create(self, request, *args, **kwargs): else: # If queryLedger fails, invoke will fail too so we handle the issue right now try: - pkhash = data['message'].replace(')" ', '').split('tkey: ')[-1].strip() + msg = json.loads(data['message'].split('payload:')[-1].strip().strip('"').encode('utf-8').decode('unicode_escape')) + pkhash = msg['error'].replace('(', '').replace(')', '').split('tkey: ')[-1].strip() if len(pkhash) != 64: raise Exception('bad pkhash') @@ -87,7 +90,8 @@ def create(self, request, *args, **kwargs): if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): try: - pkhash = data['message'].replace(')" ', '').split('tkey: ')[-1].strip() + msg = json.loads(data['message'].split('payload:')[-1].strip().strip('"').encode('utf-8').decode('unicode_escape')) + pkhash = msg['error'].replace('(', '').replace(')', '').split('tkey: ')[-1].strip() if len(pkhash) != 64: raise Exception('bad pkhash') diff --git a/substrabac/substrapp/views/traintuple.py b/substrabac/substrapp/views/traintuple.py index 478e291eb..3c93e1910 100644 --- a/substrabac/substrapp/views/traintuple.py +++ b/substrabac/substrapp/views/traintuple.py @@ -1,3 +1,4 @@ +import json from rest_framework import mixins, status from rest_framework.response import Response from rest_framework.viewsets import GenericViewSet @@ -78,7 +79,8 @@ def create(self, request, *args, **kwargs): else: # If queryLedger fails, invoke will fail too so we handle the issue right now try: - pkhash = data['message'].replace(')" ', '').split('tkey: ')[-1].strip() + msg = json.loads(data['message'].split('payload:')[-1].strip().strip('"').encode('utf-8').decode('unicode_escape')) + pkhash = msg['error'].replace('(', '').replace(')', '').split('tkey: ')[-1].strip() if len(pkhash) != 64: raise Exception('bad pkhash') @@ -99,7 +101,8 @@ def create(self, request, *args, **kwargs): if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): try: - pkhash = data['message'].replace(')" ', '').split('tkey: ')[-1].strip() + msg = json.loads(data['message'].split('payload:')[-1].strip().strip('"').encode('utf-8').decode('unicode_escape')) + pkhash = msg['error'].replace('(', '').replace(')', '').split('tkey: ')[-1].strip() if len(pkhash) != 64: raise Exception('bad pkhash') From bbdd6e999de795b95fc7bc8f518cf1bc4ca80238 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 23 Apr 2019 11:02:44 +0200 Subject: [PATCH 033/106] Update chaincode logging and message handling (to be removed with the SDK in the future) --- docker/start.py | 14 ++++++++++---- substrabac/substrapp/utils.py | 2 -- substrabac/substrapp/views/testtuple.py | 6 ++++-- substrabac/substrapp/views/traintuple.py | 6 
++++-- 4 files changed, 18 insertions(+), 10 deletions(-) diff --git a/docker/start.py b/docker/start.py index 95d1e2f9a..66443899d 100644 --- a/docker/start.py +++ b/docker/start.py @@ -10,6 +10,8 @@ raven_worker_url = "https://76abd6b5d11e48ea8a118831c86fc615@sentry.io/1402762" raven_scheduler_url = raven_worker_url +FABRIC_LOGGING_SPEC = "debug" + def generate_docker_compose_file(conf, launch_settings): try: @@ -80,7 +82,8 @@ def generate_docker_compose_file(conf, launch_settings): f"SITE_HOST={os.environ.get('SITE_HOST', 'localhost')}", f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}"], + f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}", + f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': ['/substra/medias:/substra/medias', '/substra/dryrun:/substra/dryrun', '/substra/static:/usr/src/app/substrabac/statics', @@ -109,7 +112,8 @@ def generate_docker_compose_file(conf, launch_settings): f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", 'DATABASE_HOST=postgresql', f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}"], + f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}", + f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': [f'/substra/conf/{org_name}:/substra/conf/{org_name}', f'/substra/data/orgs/{orderer}/ca-cert.pem:/substra/data/orgs/{orderer}/ca-cert.pem', f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem', @@ -135,7 +139,8 @@ def generate_docker_compose_file(conf, launch_settings): f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", 'DATABASE_HOST=postgresql', f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}"], + f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}", + f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': ['/var/run/docker.sock:/var/run/docker.sock', '/substra/medias:/substra/medias', f'/substra/conf/{org_name}:/substra/conf/{org_name}', @@ -163,7 +168,8 @@ def generate_docker_compose_file(conf, launch_settings): f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", 'DATABASE_HOST=postgresql', f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}"], + f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}", + f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': ['/var/run/docker.sock:/var/run/docker.sock', '/substra/medias:/substra/medias', '/substra/dryrun:/substra/dryrun', diff --git a/substrabac/substrapp/utils.py b/substrabac/substrapp/utils.py index 268f8e9f2..a2bdaea8d 100644 --- a/substrabac/substrapp/utils.py +++ b/substrabac/substrapp/utils.py @@ -48,7 +48,6 @@ def queryLedger(options): print(f'Querying chaincode in the channel \'{channel_name}\' on the peer \'{peer["host"]}\' ...', flush=True) output = subprocess.run([os.path.join(PROJECT_ROOT, '../bin/peer'), - '--logging-level', 'DEBUG', 'chaincode', 'query', '-x', '-C', channel_name, @@ -105,7 +104,6 @@ def invokeLedger(options, sync=False): print(f'Sending invoke transaction to {peer["host"]} ...', flush=True) cmd = [os.path.join(PROJECT_ROOT, '../bin/peer'), - '--logging-level', 'DEBUG', 'chaincode', 'invoke', '-C', channel_name, '-n', chaincode_name, diff --git 
a/substrabac/substrapp/views/testtuple.py b/substrabac/substrapp/views/testtuple.py index bdb0dfbec..4baf7f3de 100644 --- a/substrabac/substrapp/views/testtuple.py +++ b/substrabac/substrapp/views/testtuple.py @@ -68,6 +68,7 @@ def create(self, request, *args, **kwargs): else: # If queryLedger fails, invoke will fail too so we handle the issue right now try: + data['message'] = data['message'].split('Error')[-1] msg = json.loads(data['message'].split('payload:')[-1].strip().strip('"').encode('utf-8').decode('unicode_escape')) pkhash = msg['error'].replace('(', '').replace(')', '').split('tkey: ')[-1].strip() @@ -76,7 +77,7 @@ def create(self, request, *args, **kwargs): else: st = status.HTTP_409_CONFLICT - return Response({'message': data['message'], + return Response({'message': data['message'].split('payload')[0], 'pkhash': pkhash}, status=st) except: return Response(data, status=st) @@ -90,6 +91,7 @@ def create(self, request, *args, **kwargs): if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): try: + data['message'] = data['message'].split('Error')[-1] msg = json.loads(data['message'].split('payload:')[-1].strip().strip('"').encode('utf-8').decode('unicode_escape')) pkhash = msg['error'].replace('(', '').replace(')', '').split('tkey: ')[-1].strip() @@ -98,7 +100,7 @@ def create(self, request, *args, **kwargs): else: st = status.HTTP_409_CONFLICT - return Response({'message': data['message'], + return Response({'message': data['message'].split('payload')[0], 'pkhash': pkhash}, status=st) except: return Response(data, status=st) diff --git a/substrabac/substrapp/views/traintuple.py b/substrabac/substrapp/views/traintuple.py index 3c93e1910..ee164a6ea 100644 --- a/substrabac/substrapp/views/traintuple.py +++ b/substrabac/substrapp/views/traintuple.py @@ -79,6 +79,7 @@ def create(self, request, *args, **kwargs): else: # If queryLedger fails, invoke will fail too so we handle the issue right now try: + data['message'] = data['message'].split('Error')[-1] msg = json.loads(data['message'].split('payload:')[-1].strip().strip('"').encode('utf-8').decode('unicode_escape')) pkhash = msg['error'].replace('(', '').replace(')', '').split('tkey: ')[-1].strip() @@ -87,7 +88,7 @@ def create(self, request, *args, **kwargs): else: st = status.HTTP_409_CONFLICT - return Response({'message': data['message'], + return Response({'message': data['message'].split('payload')[0], 'pkhash': pkhash}, status=st) except: return Response(data, status=st) @@ -101,6 +102,7 @@ def create(self, request, *args, **kwargs): if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): try: + data['message'] = data['message'].split('Error')[-1] msg = json.loads(data['message'].split('payload:')[-1].strip().strip('"').encode('utf-8').decode('unicode_escape')) pkhash = msg['error'].replace('(', '').replace(')', '').split('tkey: ')[-1].strip() @@ -109,7 +111,7 @@ def create(self, request, *args, **kwargs): else: st = status.HTTP_409_CONFLICT - return Response({'message': data['message'], + return Response({'message': data['message'].split('payload')[0], 'pkhash': pkhash}, status=st) except: return Response(data, status=st) From 4506828e0d2ff316077bf293a6c4a465cd2400ed Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 23 Apr 2019 11:44:45 +0200 Subject: [PATCH 034/106] Update test assets and fix views --- substrabac/scripts/generate_assets.py | 12 +- substrabac/substrapp/tests/assets.py | 157 ++++++++++++---------- substrabac/substrapp/tests/tests_views.py | 27 +--- 
substrabac/substrapp/views/datamanager.py | 15 --- substrabac/substrapp/views/objective.py | 19 +-- 5 files changed, 98 insertions(+), 132 deletions(-) diff --git a/substrabac/scripts/generate_assets.py b/substrabac/scripts/generate_assets.py index 98b006654..ea0447a02 100644 --- a/substrabac/scripts/generate_assets.py +++ b/substrabac/scripts/generate_assets.py @@ -14,13 +14,13 @@ def main(): client.set_config('owkin') assets = {} - assets['objective'] = json.dumps(client.list('objective')['result'], indent=4) - assets['datamanager'] = json.dumps(client.list('data_manager')['result'], indent=4) - assets['algo'] = json.dumps(client.list('algo')['result'], indent=4) - assets['traintuple'] = json.dumps(client.list('traintuple')['result'], indent=4) - assets['testtuple'] = json.dumps(client.list('testtuple')['result'], indent=4) + assets['objective'] = json.dumps(client.list('objective'), indent=4) + assets['datamanager'] = json.dumps(client.list('data_manager'), indent=4) + assets['algo'] = json.dumps(client.list('algo'), indent=4) + assets['traintuple'] = json.dumps(client.list('traintuple'), indent=4) + assets['testtuple'] = json.dumps(client.list('testtuple'), indent=4) - assets['model'] = json.dumps([res for res in client.list('model')['result'] + assets['model'] = json.dumps([res for res in client.list('model') if ('traintuple' in res and 'testtuple' in res)], indent=4) with open(os.path.join(dir_path, '../substrapp/tests/assets.py'), 'w') as f: diff --git a/substrabac/substrapp/tests/assets.py b/substrabac/substrapp/tests/assets.py index d9a14315d..1e4a55eac 100644 --- a/substrabac/substrapp/tests/assets.py +++ b/substrabac/substrapp/tests/assets.py @@ -1,4 +1,20 @@ objective = [ + { + "key": "1cdafbb018dd195690111d74916b76c96892d897ec3587c814f287946db446c3", + "name": "Skin Lesion Classification Objective", + "description": { + "hash": "1cdafbb018dd195690111d74916b76c96892d897ec3587c814f287946db446c3", + "storageAddress": "http://testserver/objective/1cdafbb018dd195690111d74916b76c96892d897ec3587c814f287946db446c3/description/" + }, + "metrics": { + "name": "macro-average recall", + "hash": "0bc732c26bafdc41321c2bffd35b6835aa35f7371a4eb02994642c2c3a688f60", + "storageAddress": "http://testserver/objective/1cdafbb018dd195690111d74916b76c96892d897ec3587c814f287946db446c3/metrics/" + }, + "owner": "506fb2dd5891731166847208f7a7d1b17371c577af72f26286bb81c730c18a18", + "testDataset": None, + "permissions": "all" + }, { "key": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "name": "Skin Lesion Classification Objective", @@ -11,7 +27,7 @@ "hash": "750f622262854341bd44f55c1018949e9c119606ef5068bd7d137040a482a756", "storageAddress": "http://testserver/objective/3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71/metrics/" }, - "owner": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", + "owner": "506fb2dd5891731166847208f7a7d1b17371c577af72f26286bb81c730c18a18", "testDataset": { "dataManagerKey": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", "dataSampleKeys": [ @@ -27,32 +43,32 @@ { "objectiveKey": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "description": { - "hash": "15863c2af1fcfee9ca6f61f04be8a0eaaf6a45e4d50c421788d450d198e580f1", - "storageAddress": "http://testserver/data_manager/59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd/description/" + "hash": "258bef187a166b3fef5cb86e68c8f7e154c283a148cd5bc344fec7e698821ad3", + "storageAddress": 
"http://testserver/data_manager/9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528/description/" }, - "key": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", - "name": "ISIC 2018", + "key": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", + "name": "Simplified ISIC 2018", "opener": { - "hash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", - "storageAddress": "http://testserver/data_manager/59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd/opener/" + "hash": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", + "storageAddress": "http://testserver/data_manager/9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528/opener/" }, - "owner": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", + "owner": "506fb2dd5891731166847208f7a7d1b17371c577af72f26286bb81c730c18a18", "permissions": "all", "type": "Images" }, { "objectiveKey": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "description": { - "hash": "258bef187a166b3fef5cb86e68c8f7e154c283a148cd5bc344fec7e698821ad3", - "storageAddress": "http://testserver/data_manager/9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528/description/" + "hash": "15863c2af1fcfee9ca6f61f04be8a0eaaf6a45e4d50c421788d450d198e580f1", + "storageAddress": "http://testserver/data_manager/59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd/description/" }, - "key": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", - "name": "Simplified ISIC 2018", + "key": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", + "name": "ISIC 2018", "opener": { - "hash": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", - "storageAddress": "http://testserver/data_manager/9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528/opener/" + "hash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", + "storageAddress": "http://testserver/data_manager/59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd/opener/" }, - "owner": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", + "owner": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", "permissions": "all", "type": "Images" } @@ -70,8 +86,7 @@ "hash": "b9463411a01ea00869bdffce6e59a5c100a4e635c0a9386266cad3c77eb28e9e", "storageAddress": "http://testserver/algo/0acc5180e09b6a6ac250f4e3c172e2893f617aa1c22ef1f379019d20fe44142f/description/" }, - "owner": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", - "objectiveKey": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", + "owner": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", "permissions": "all" }, { @@ -85,8 +100,7 @@ "hash": "124a0425b746d7072282d167b53cb6aab3a31bf1946dae89135c15b0126ebec3", "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/description/" }, - "owner": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", - "objectiveKey": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", + "owner": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", "permissions": "all" }, { @@ -100,8 +114,7 @@ "hash": "4acea40c4b51996c88ef279c5c9aa41ab77b97d38c5ca167e978a98b2e402675", "storageAddress": "http://testserver/algo/f2d9fd38e25cd975c49f3ce7e6739846585e89635a86689b5db42ab2c0c57284/description/" }, - "owner": 
"703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", - "objectiveKey": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", + "owner": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", "permissions": "all" } ] @@ -113,7 +126,7 @@ "name": "Neural Network", "storageAddress": "http://testserver/algo/0acc5180e09b6a6ac250f4e3c172e2893f617aa1c22ef1f379019d20fe44142f/file/" }, - "creator": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", + "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", "dataset": { "keys": [ "bcdda7da240f1de016e5c185d63027ff6536c233f7ed96d086766e99027d4e24", @@ -121,12 +134,12 @@ ], "openerHash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", "perf": 0, - "worker": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e" + "worker": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22" }, "fltask": "", "inModels": None, - "key": "dfa89a184b6ba5c50daa5a7176818fe1b1c5c3b781b30b99e4d79eef036006f2", - "log": "[00-01-0032-456da5d]", + "key": "1a585c39a427b14e96388f2fb2acd10bc0b26560022a40cb371cbcc55b3cafc7", + "log": "[00-01-0032-45bad7f]", "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "metrics": { @@ -137,7 +150,8 @@ "outModel": None, "permissions": "all", "rank": 0, - "status": "failed" + "status": "failed", + "tag": "My super tag" }, { "algo": { @@ -145,7 +159,7 @@ "name": "Logistic regression", "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/file/" }, - "creator": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", + "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", "dataset": { "keys": [ "bcdda7da240f1de016e5c185d63027ff6536c233f7ed96d086766e99027d4e24", @@ -153,12 +167,12 @@ ], "openerHash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", "perf": 1, - "worker": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e" + "worker": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22" }, "fltask": "", "inModels": None, - "key": "66caabaf37455cc7af8e89cac37eb0ebfdf73ac7fe4765c644ea6340c2589c0a", - "log": "Train - CPU:78.04 % - Mem:0.11 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", + "key": "1ef64eb72db5d8d8aed6a35582e83487db5d085215678561283c54abace649a1", + "log": "Train - CPU:77.60 % - Mem:0.11 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "metrics": { @@ -167,12 +181,13 @@ } }, "outModel": { - "hash": "2bd56e309a7e899027a1e8b3990fd7a69986291043079d836bc2f8bcdb9ec8de", - "storageAddress": "http://testserver/model/2bd56e309a7e899027a1e8b3990fd7a69986291043079d836bc2f8bcdb9ec8de/file/" + "hash": "e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b", + "storageAddress": "http://testserver/model/e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b/file/" }, "permissions": "all", "rank": 0, - "status": "done" + "status": "done", + "tag": "substra" }, { "algo": { @@ -180,7 +195,7 @@ "name": "Random Forest", "storageAddress": "http://testserver/algo/f2d9fd38e25cd975c49f3ce7e6739846585e89635a86689b5db42ab2c0c57284/file/" }, - "creator": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", + "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", "dataset": { "keys": [ 
"bcdda7da240f1de016e5c185d63027ff6536c233f7ed96d086766e99027d4e24", @@ -188,12 +203,12 @@ ], "openerHash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", "perf": 0, - "worker": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e" + "worker": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22" }, "fltask": "", "inModels": None, - "key": "7f4bea1afafefda207daf7c24034aab4f1db0df0575ba6b303d3d7a6df1794e7", - "log": "[00-01-0032-ea27bd6]", + "key": "9271dbc9d629c5d3bccd4c6f269f54e0d253fb5c53d3de958159605778b3de29", + "log": "[00-01-0032-899a79c]", "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "metrics": { @@ -204,31 +219,23 @@ "outModel": None, "permissions": "all", "rank": 0, - "status": "failed" + "status": "failed", + "tag": "" } ] testtuple = [ { - "objective": { - "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", - "metrics": { - "hash": "750f622262854341bd44f55c1018949e9c119606ef5068bd7d137040a482a756", - "storageAddress": "http://testserver/objective/3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71/metrics/" - } - }, + "key": "cc0c0465c6aff2fd195bcc8e2ad45379f991a141224340f984d61556e6bfd09c", "algo": { "name": "Logistic regression", "hash": "da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b", "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/file/" }, - "model": { - "traintupleKey": "66caabaf37455cc7af8e89cac37eb0ebfdf73ac7fe4765c644ea6340c2589c0a", - "hash": "2bd56e309a7e899027a1e8b3990fd7a69986291043079d836bc2f8bcdb9ec8de", - "storageAddress": "http://testserver/model/2bd56e309a7e899027a1e8b3990fd7a69986291043079d836bc2f8bcdb9ec8de/file/" - }, + "certified": True, + "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", "dataset": { - "worker": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", + "worker": "506fb2dd5891731166847208f7a7d1b17371c577af72f26286bb81c730c18a18", "keys": [ "17d58b67ae2028018108c9bf555fa58b2ddcfe560e0117294196e79d26140b2a", "8bf3bf4f753a32f27d18c86405e7a406a83a55610d91abcca9acc525061b8ecf" @@ -236,11 +243,22 @@ "openerHash": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", "perf": 0 }, - "certified": True, - "status": "done", "log": "Test - CPU:0.00 % - Mem:0.00 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", + "model": { + "traintupleKey": "1ef64eb72db5d8d8aed6a35582e83487db5d085215678561283c54abace649a1", + "hash": "e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b", + "storageAddress": "http://testserver/model/e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b/file/" + }, + "objective": { + "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", + "metrics": { + "hash": "750f622262854341bd44f55c1018949e9c119606ef5068bd7d137040a482a756", + "storageAddress": "http://testserver/objective/3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71/metrics/" + } + }, "permissions": "all", - "creator": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e" + "status": "done", + "tag": "" } ] @@ -253,7 +271,7 @@ "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/file/" }, "certified": True, - "creator": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", + "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", "dataset": { "keys": [ 
"17d58b67ae2028018108c9bf555fa58b2ddcfe560e0117294196e79d26140b2a", @@ -261,14 +279,14 @@ ], "openerHash": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", "perf": 0, - "worker": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e" + "worker": "506fb2dd5891731166847208f7a7d1b17371c577af72f26286bb81c730c18a18" }, - "key": "0cd626cf445b1e17f7fb854e696d87db65b460545aab1677920459ae8a774f4f", + "key": "cc0c0465c6aff2fd195bcc8e2ad45379f991a141224340f984d61556e6bfd09c", "log": "Test - CPU:0.00 % - Mem:0.00 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", "model": { - "hash": "2bd56e309a7e899027a1e8b3990fd7a69986291043079d836bc2f8bcdb9ec8de", - "storageAddress": "http://testserver/model/2bd56e309a7e899027a1e8b3990fd7a69986291043079d836bc2f8bcdb9ec8de/file/", - "traintupleKey": "66caabaf37455cc7af8e89cac37eb0ebfdf73ac7fe4765c644ea6340c2589c0a" + "hash": "e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b", + "storageAddress": "http://testserver/model/e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b/file/", + "traintupleKey": "1ef64eb72db5d8d8aed6a35582e83487db5d085215678561283c54abace649a1" }, "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", @@ -278,7 +296,8 @@ } }, "permissions": "all", - "status": "done" + "status": "done", + "tag": "" }, "traintuple": { "algo": { @@ -286,7 +305,7 @@ "name": "Logistic regression", "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/file/" }, - "creator": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e", + "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", "dataset": { "keys": [ "bcdda7da240f1de016e5c185d63027ff6536c233f7ed96d086766e99027d4e24", @@ -294,12 +313,12 @@ ], "openerHash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", "perf": 1, - "worker": "703433008d3f62dab5ffaccb3c53d723660f5f6cdac3c5dfd26ac88312b5a94e" + "worker": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22" }, "fltask": "", "inModels": None, - "key": "66caabaf37455cc7af8e89cac37eb0ebfdf73ac7fe4765c644ea6340c2589c0a", - "log": "Train - CPU:78.04 % - Mem:0.11 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", + "key": "1ef64eb72db5d8d8aed6a35582e83487db5d085215678561283c54abace649a1", + "log": "Train - CPU:77.60 % - Mem:0.11 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "metrics": { @@ -308,12 +327,14 @@ } }, "outModel": { - "hash": "2bd56e309a7e899027a1e8b3990fd7a69986291043079d836bc2f8bcdb9ec8de", - "storageAddress": "http://testserver/model/2bd56e309a7e899027a1e8b3990fd7a69986291043079d836bc2f8bcdb9ec8de/file/" + "hash": "e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b", + "storageAddress": "http://testserver/model/e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b/file/" }, "permissions": "all", "rank": 0, - "status": "done" + "status": "done", + "tag": "substra" } } ] + diff --git a/substrabac/substrapp/tests/tests_views.py b/substrabac/substrapp/tests/tests_views.py index 5e46e633e..562f680bf 100644 --- a/substrabac/substrapp/tests/tests_views.py +++ b/substrabac/substrapp/tests/tests_views.py @@ -131,7 +131,7 @@ def test_objective_list_filter_name(self): response = self.client.get(url + search_params, **self.extra) r = response.json() - self.assertEqual(len(r[0]), 1) + self.assertEqual(len(r[0]), 2) def test_objective_list_filter_metrics(self): url = 
reverse('substrapp:objective-list') @@ -156,19 +156,6 @@ def test_objective_list_filter_datamanager(self): self.assertEqual(len(r[0]), 1) - def test_objective_list_filter_algo(self): - url = reverse('substrapp:objective-list') - with mock.patch('substrapp.views.objective.queryLedger') as mqueryLedger: - mqueryLedger.side_effect = [(objective, status.HTTP_200_OK), - (algo, status.HTTP_200_OK)] - - url = reverse('substrapp:objective-list') - search_params = '?search=algo%253Aname%253ALogistic%2520regression' - response = self.client.get(url + search_params, **self.extra) - r = response.json() - - self.assertEqual(len(r[0]), 1) - def test_objective_list_filter_model(self): url = reverse('substrapp:objective-list') with mock.patch('substrapp.views.objective.queryLedger') as mqueryLedger: @@ -706,18 +693,6 @@ def test_datamanager_list_filter_name(self): self.assertEqual(len(r[0]), 1) - def test_datamanager_list_filter_algo(self): - url = reverse('substrapp:data_manager-list') - with mock.patch('substrapp.views.datamanager.queryLedger') as mqueryLedger: - mqueryLedger.side_effect = [(datamanager, status.HTTP_200_OK), - (algo, status.HTTP_200_OK)] - - search_params = '?search=algo%253Aname%253ALogistic%2520regression' - response = self.client.get(url + search_params, **self.extra) - r = response.json() - - self.assertEqual(len(r[0]), 2) - def test_datamanager_list_filter_objective(self): url = reverse('substrapp:data_manager-list') with mock.patch('substrapp.views.datamanager.queryLedger') as mqueryLedger: diff --git a/substrabac/substrapp/views/datamanager.py b/substrabac/substrapp/views/datamanager.py index a78f15c28..96869ef72 100644 --- a/substrabac/substrapp/views/datamanager.py +++ b/substrabac/substrapp/views/datamanager.py @@ -304,21 +304,6 @@ def list(self, request, *args, **kwargs): filteredData = [x for x in objectiveData if x[key] in val] objectiveKeys = [x['key'] for x in filteredData] l[idx] = [x for x in l[idx] if x['objectiveKey'] in objectiveKeys] - elif k == 'algo': # select objective used by these algo - if not algoData: - # TODO find a way to put this call in cache - algoData, st = queryLedger({ - 'args': '{"Args":["queryAlgos"]}' - }) - if st != status.HTTP_200_OK: - return Response(algoData, status=st) - if algoData is None: - algoData = [] - - for key, val in subfilters.items(): - filteredData = [x for x in algoData if x[key] in val] - objectiveKeys = [x['objectiveKey'] for x in filteredData] - l[idx] = [x for x in l[idx] if x['objectiveKey'] in objectiveKeys] elif k == 'model': # select objectives used by outModel hash if not modelData: # TODO find a way to put this call in cache diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index df291bfbe..7ffbe6e7f 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -340,23 +340,8 @@ def list(self, request, *args, **kwargs): filteredData = [x for x in dataManagerData if x[key] in val] dataManagerKeys = [x['key'] for x in filteredData] objectiveKeys = [x['objectiveKey'] for x in filteredData] - objectives[idx] = [x for x in objectives[idx] if x['key'] in objectiveKeys or x['testData']['dataManagerKey'] in dataManagerKeys] - - elif k == 'algo': # select objective used by these algo - if not algoData: - # TODO find a way to put this call in cache - algoData, st = queryLedger({ - 'args': '{"Args":["queryAlgos"]}' - }) - if st != status.HTTP_200_OK: - return Response(algoData, status=st) - if algoData is None: - algoData = [] - - for key, val 
in subfilters.items(): - filteredData = [x for x in algoData if x[key] in val] - objectiveKeys = [x['objectiveKey'] for x in filteredData] - objectives[idx] = [x for x in objectives[idx] if x['key'] in objectiveKeys] + objectives[idx] = [x for x in objectives[idx] if x['key'] in objectiveKeys or + (x['testDataset'] and x['testDataset']['dataManagerKey'] in dataManagerKeys)] elif k == 'model': # select objectives used by outModel hash if not modelData: From f0e80e2809333bb833dcc855b04d514e0ee57365 Mon Sep 17 00:00:00 2001 From: GuillaumeCisco Date: Fri, 12 Apr 2019 14:49:11 +0200 Subject: [PATCH 035/106] Support remote paths --- .../management/commands/createobjective.py | 1 - .../substrapp/tests/tests_createobjective.py | 6 ------ substrabac/substrapp/views/datasample.py | 16 +++++++++++++++- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/substrabac/substrapp/management/commands/createobjective.py b/substrabac/substrapp/management/commands/createobjective.py index af240adcf..b50f62353 100644 --- a/substrabac/substrapp/management/commands/createobjective.py +++ b/substrabac/substrapp/management/commands/createobjective.py @@ -10,7 +10,6 @@ from substrapp.management.utils.localRequest import LocalRequest from substrapp.serializers import DataManagerSerializer, LedgerDataManagerSerializer, \ LedgerObjectiveSerializer, ObjectiveSerializer -from substrapp.serializers.ledger.datamanager.util import updateLedgerDataManager from substrapp.utils import get_hash from substrapp.views.datasample import LedgerException diff --git a/substrabac/substrapp/tests/tests_createobjective.py b/substrabac/substrapp/tests/tests_createobjective.py index a114c4885..9df1beff3 100644 --- a/substrabac/substrapp/tests/tests_createobjective.py +++ b/substrabac/substrapp/tests/tests_createobjective.py @@ -75,7 +75,6 @@ def test_createobjective(self): with patch.object(LedgerObjectiveSerializer, 'create') as mobjectivecreate, \ patch.object(LedgerDataManagerSerializer, 'create') as mdatamanagercreate, \ - patch('substrapp.management.commands.createobjective.updateLedgerDataManager') as mdatamanagerupdate, \ patch.object(LedgerDataSampleSerializer, 'create') as mdatacreate, \ patch('substrapp.views.datasample.DataSampleViewSet.check_datamanagers') as mcheck_datamanagers: @@ -95,11 +94,6 @@ def test_createobjective(self): }, status.HTTP_201_CREATED) - mdatamanagerupdate.return_value = ({ - 'pkhash': datamanager_pk - }, - status.HTTP_201_CREATED) - mcheck_datamanagers.return_value = True saved_stdout = sys.stdout diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index 84d4b5202..abd62c203 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -1,8 +1,11 @@ +from os.path import normpath + import docker import os import ntpath import uuid +from checksumdir import dirhash from django.conf import settings from docker.errors import ContainerError from rest_framework import status, mixins @@ -190,6 +193,7 @@ def create(self, request, *args, **kwargs): return Response({'message': str(e)}, status=status.HTTP_400_BAD_REQUEST) else: l = [] + # files, should be archive for k, file in request.FILES.items(): try: pkhash = get_dir_hash(file) @@ -205,8 +209,18 @@ def create(self, request, *args, **kwargs): 'pkhash': pkhash, 'file': file }) + # paths, should be directory + for path in request.POST.getlist('paths'): + if os.path.isdir(path): + pkhash = dirhash(path, 'sha256') + l.append({ + 'pkhash': pkhash, + 'path': 
normpath(path) + }) + else: + return Response({'message': f'One of your paths does not exist, is not a directory or is not an absolute path: {path}'}, status=status.HTTP_400_BAD_REQUEST) - many = len(request.FILES) > 1 + many = len(l) > 1 data = l if not many: data = data[0] From dc7c6bebc0888732bc8a64e5766110a41c0cd212 Mon Sep 17 00:00:00 2001 From: GuillaumeCisco Date: Fri, 12 Apr 2019 16:01:17 +0200 Subject: [PATCH 036/106] Update populate.py --- substrabac/populate.py | 11 ++++++----- substrabac/substrapp/views/datasample.py | 19 +++++++++++++++++-- 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index 4bf4468e4..f19889e4d 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -125,10 +125,11 @@ def update_datamanager(data_manager_key, data, profile): train_data_sample_keys = [] if data_manager_org1_key: print(f'register train data on datamanager {org_1} (will take datamanager creator as worker)') + data = { - 'files': [ + 'paths': [ os.path.join(dir_path, './fixtures/chunantes/datasamples/train/0024306.zip'), - os.path.join(dir_path, './fixtures/chunantes/datasamples/train/0024307.zip') + os.path.join(dir_path, './fixtures/chunantes/datasamples/train/0024308') ], 'data_manager_keys': [data_manager_org1_key], 'test_only': False, @@ -152,7 +153,7 @@ def update_datamanager(data_manager_key, data, profile): if data_manager_org0_key and data_manager_org1_key: print('register test data') data = { - 'files': [ + 'paths': [ os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024900.zip'), os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024901.zip') ], @@ -165,7 +166,7 @@ def update_datamanager(data_manager_key, data, profile): print('register test data 2') data = { - 'files': [ + 'paths': [ os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024902.zip'), os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024903.zip') ], @@ -178,7 +179,7 @@ def update_datamanager(data_manager_key, data, profile): print('register test data 3') data = { - 'files': [ + 'paths': [ os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024904.zip'), os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024905.zip') ], diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index abd62c203..9ca090a66 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -209,8 +209,20 @@ def create(self, request, *args, **kwargs): 'pkhash': pkhash, 'file': file }) - # paths, should be directory - for path in request.POST.getlist('paths'): + + # path/paths case + path = request.POST.get('path', None) + paths = request.POST.getlist('paths', []) + + if path and paths: + return Response({'message': 'Cannot use path and paths together.'}, + status=status.HTTP_400_BAD_REQUEST) + + if path is not None: + paths = [path] + + # paths, should be directories + for path in paths: if os.path.isdir(path): pkhash = dirhash(path, 'sha256') l.append({ @@ -236,6 +248,9 @@ def create(self, request, *args, **kwargs): else: if dryrun: + # TODO handle + # path = request.POST.get('path', None) + # paths = request.POST.getlist('paths', []) try: data_sample_files = [] for k, file in request.FILES.items(): From b7ef0ccbd42e3e7213c0ebeb565a8f98dba135b9 Mon Sep 17 00:00:00 2001 From: GuillaumeCisco Date: Tue, 16 Apr 2019 11:39:50 +0200 Subject: [PATCH 037/106] Refacto on data_sample create for handling dryrun paths --- 
substrabac/substrapp/views/datasample.py | 335 ++++++++++++----------- 1 file changed, 180 insertions(+), 155 deletions(-) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index 9ca090a66..0437eed18 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -1,3 +1,4 @@ +import logging from os.path import normpath import docker @@ -25,6 +26,8 @@ from substrapp.tasks import build_subtuple_folders, remove_subtuple_materials from substrapp.views.utils import find_primary_key_error +logger = logging.getLogger('django.request') + def path_leaf(path): head, tail = ntpath.split(path) @@ -38,36 +41,48 @@ def __init__(self, data, st): super(LedgerException).__init__() +class InvalidException(Exception): + def __init__(self, data, pkhash, st): + self.data = data + self.pkhash = pkhash + self.st = st + super(LedgerException).__init__() + + @app.task(bind=True, ignore_result=False) -def compute_dryrun(self, data_sample_files, data_manager_keys): +def compute_dryrun(self, data, data_manager_keys): from shutil import copy from substrapp.models import DataManager - try: - # Name of the dry-run subtuple (not important) - pkhash = data_sample_files[0]['pkhash'] + client = docker.from_env() - subtuple_directory = build_subtuple_folders({'key': pkhash}) + # Name of the dry-run subtuple (not important) + pkhash = data[0]['pkhash'] + subtuple_directory = build_subtuple_folders({'key': pkhash}) + + try: - for data_sample in data_sample_files: - try: - uncompress_path(data_sample['filepath'], - os.path.join(subtuple_directory, 'data', data_sample['pkhash'])) - except Exception as e: - raise e + for data_sample in data: + # uncompress only for file + if 'file' in data_sample: + try: + uncompress_path(data_sample['file'], os.path.join(subtuple_directory, 'data', data_sample['pkhash'])) + except Exception as e: + raise e for datamanager_key in data_manager_keys: datamanager = DataManager.objects.get(pk=datamanager_key) copy(datamanager.data_opener.path, os.path.join(subtuple_directory, 'opener/opener.py')) # Launch verification - client = docker.from_env() opener_file = os.path.join(subtuple_directory, 'opener/opener.py') data_sample_docker_path = os.path.join(getattr(settings, 'PROJECT_ROOT'), 'fake_data_sample') # fake_data comes with substrabac data_docker = 'data_dry_run' # tag must be lowercase for docker data_docker_name = f'{data_docker}_{pkhash}_{uuid.uuid4().hex}' data_path = os.path.join(subtuple_directory, 'data') + + # TODO bind paths volumes = {data_path: {'bind': '/sandbox/data', 'mode': 'rw'}, opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}} @@ -98,11 +113,11 @@ def compute_dryrun(self, data_sample_files, data_manager_keys): container = client.containers.get(data_docker_name) container.remove() except: - pass + logger.error('Could not remove containers') remove_subtuple_materials(subtuple_directory) - for data_sample in data_sample_files: - if os.path.exists(data_sample['filepath']): - os.remove(data_sample['filepath']) + for data_sample in data: + if 'file' in data_sample and os.path.exists(data_sample['file']): + os.remove(data_sample['file']) class DataSampleViewSet(mixins.CreateModelMixin, @@ -114,8 +129,8 @@ class DataSampleViewSet(mixins.CreateModelMixin, queryset = DataSample.objects.all() serializer_class = DataSampleSerializer - def dryrun_task(self, data_sample_files, data_manager_keys): - task = compute_dryrun.apply_async((data_sample_files, data_manager_keys), + def 
dryrun_task(self, data, data_manager_keys): + task = compute_dryrun.apply_async((data, data_manager_keys), queue=f"{settings.LEDGER['name']}.dryrunner") url_http = 'http' if settings.DEBUG else 'https' site_port = getattr(settings, "SITE_PORT", None) @@ -134,157 +149,167 @@ def check_datamanagers(data_manager_keys): @staticmethod def commit(serializer, ledger_data, many): + instances = serializer.save() # can raise + # init ledger serializer + if not many: + instances = [instances] + ledger_data.update({'instances': instances}) + ledger_serializer = LedgerDataSampleSerializer(data=ledger_data) + + if not ledger_serializer.is_valid(): + # delete instance + for instance in instances: + instance.delete() + raise ValidationError(ledger_serializer.errors) + + # create on ledger + data, st = ledger_serializer.create(ledger_serializer.validated_data) + + if st == status.HTTP_408_REQUEST_TIMEOUT: + if many: + data.update({'pkhash': [x['pkhash'] for x in serializer.data]}) + raise LedgerException(data, st) + + if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): + raise LedgerException(data, st) + + # update validated to True in response + if 'pkhash' in data and data['validated']: + if many: + for d in serializer.data: + if d['pkhash'] in data['pkhash']: + d.update({'validated': data['validated']}) + else: + d = dict(serializer.data) + d.update({'validated': data['validated']}) + + return serializer.data, st + + def compute_data(self, request): + data = [] + # files, should be archive + for k, file in request.FILES.items(): + pkhash = get_dir_hash(file) # can raise + # check pkhash does not belong to the list + for x in data: + if pkhash == x['pkhash']: + raise Exception(f'Your data sample archives contain same files leading to same pkhash, please review the content of your achives. 
Archives {file} and {x["file"]} are the same') + data.append({ + 'pkhash': pkhash, + 'file': file + }) + + # path/paths case + path = request.POST.get('path', None) + paths = request.POST.getlist('paths', []) + + if path and paths: + raise Exception('Cannot use path and paths together.') + + if path is not None: + paths = [path] + + # paths, should be directories + for path in paths: + if os.path.isdir(path): + pkhash = dirhash(path, 'sha256') + data.append({ + 'pkhash': pkhash, + 'path': normpath(path) + }) + else: + raise Exception(f'One of your paths does not exist, is not a directory or is not an absolute path: {path}') + + return data + + def handle_dryrun(self, request, data_manager_keys): + data = [] + for k, file in request.FILES.items(): + pkhash = get_hash(file) + + # write on DRYRUN_ROOT + file_path = os.path.join(getattr(settings, 'DRYRUN_ROOT'), + f'data_{pkhash}.zip') + with open(file_path, 'wb') as data_file: + data_file.write(file.open().read()) + + data.append({ + 'pkhash': pkhash, + 'file': file_path, + }) + + # path/paths case + path = request.POST.get('path', None) + paths = request.POST.getlist('paths', []) + + if path is not None: + paths = [path] + + # paths, should be directories + for path in paths: + if os.path.isdir(path): + pkhash = dirhash(path, 'sha256') + data.append({ + 'pkhash': pkhash, + 'path': normpath(path) + }) + try: - instances = serializer.save() - except Exception as exc: - raise exc + task, msg = self.dryrun_task(data, data_manager_keys) + except Exception as e: + return Exception(f'Could not launch data creation with dry-run on this instance: {str(e)}') else: - # init ledger serializer - if not many: - instances = [instances] - ledger_data.update({'instances': instances}) - ledger_serializer = LedgerDataSampleSerializer(data=ledger_data) - - if not ledger_serializer.is_valid(): - # delete instance - for instance in instances: - instance.delete() - raise ValidationError(ledger_serializer.errors) - - # create on ledger - data, st = ledger_serializer.create(ledger_serializer.validated_data) - - if st == status.HTTP_408_REQUEST_TIMEOUT: - if many: - data.update({'pkhash': [x['pkhash'] for x in serializer.data]}) - raise LedgerException(data, st) - - if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): - raise LedgerException(data, st) - - # update validated to True in response - if 'pkhash' in data and data['validated']: - if many: - for d in serializer.data: - if d['pkhash'] in data['pkhash']: - d.update({'validated': data['validated']}) - else: - d = dict(serializer.data) - d.update({'validated': data['validated']}) - - return serializer.data, st + return {'id': task.id, 'message': msg}, status.HTTP_202_ACCEPTED, {} + + def _create(self, request, data_manager_keys, test_only, dryrun): + + if not data_manager_keys: + raise Exception("missing or empty field 'data_manager_keys'") + + self.check_datamanagers(data_manager_keys) # can raise + + computed_data = self.compute_data(request) + + many = len(computed_data) > 1 + data = computed_data if many else computed_data[0] + + serializer = self.get_serializer(data=data, many=many) + try: + serializer.is_valid(raise_exception=True) + except Exception as e: + pkhashes = [x['pkhash'] for x in computed_data] + st = status.HTTP_400_BAD_REQUEST + if find_primary_key_error(e): + st = status.HTTP_409_CONFLICT + raise InvalidException(e.args, pkhashes, st) + else: + if dryrun: + return self.handle_dryrun(request, data_manager_keys) + + # create on ledger + db + ledger_data = {'test_only': test_only, + 
'data_manager_keys': data_manager_keys} + data, st = self.commit(serializer, ledger_data, many) + headers = self.get_success_headers(data) + return data, st, headers def create(self, request, *args, **kwargs): data = request.data dryrun = data.get('dryrun', False) test_only = data.get('test_only', False) - - # check if bulk create data_manager_keys = data.getlist('data_manager_keys') - if not data_manager_keys: - message = "missing or empty field 'data_manager_keys'" - return Response({'message': message}, - status=status.HTTP_400_BAD_REQUEST) try: - self.check_datamanagers(data_manager_keys) + data, st, headers = self._create(request, data_manager_keys, test_only, dryrun) + except InvalidException as e: + return Response({'message': e.data, 'pkhash': e.pkhash}, status=e.st) + except LedgerException as e: + return Response({'message': e.data}, status=e.st) except Exception as e: return Response({'message': str(e)}, status=status.HTTP_400_BAD_REQUEST) else: - l = [] - # files, should be archive - for k, file in request.FILES.items(): - try: - pkhash = get_dir_hash(file) - except Exception as e: - return Response({'message': str(e)}, - status=status.HTTP_400_BAD_REQUEST) - else: - # check pkhash does not belong to the list - for x in l: - if pkhash == x['pkhash']: - return Response({'message': f'Your data sample archives contain same files leading to same pkhash, please review the content of your achives. Archives {file} and {x["file"]} are the same'}, status=status.HTTP_400_BAD_REQUEST) - l.append({ - 'pkhash': pkhash, - 'file': file - }) - - # path/paths case - path = request.POST.get('path', None) - paths = request.POST.getlist('paths', []) - - if path and paths: - return Response({'message': 'Cannot use path and paths together.'}, - status=status.HTTP_400_BAD_REQUEST) - - if path is not None: - paths = [path] - - # paths, should be directories - for path in paths: - if os.path.isdir(path): - pkhash = dirhash(path, 'sha256') - l.append({ - 'pkhash': pkhash, - 'path': normpath(path) - }) - else: - return Response({'message': f'One of your paths does not exist, is not a directory or is not an absolute path: {path}'}, status=status.HTTP_400_BAD_REQUEST) - - many = len(l) > 1 - data = l - if not many: - data = data[0] - serializer = self.get_serializer(data=data, many=many) - try: - serializer.is_valid(raise_exception=True) - except Exception as e: - pkhashes = [x['pkhash'] for x in l] - st = status.HTTP_400_BAD_REQUEST - if find_primary_key_error(e): - st = status.HTTP_409_CONFLICT - return Response({'message': e.args, 'pkhash': pkhashes}, status=st) - - else: - if dryrun: - # TODO handle - # path = request.POST.get('path', None) - # paths = request.POST.getlist('paths', []) - try: - data_sample_files = [] - for k, file in request.FILES.items(): - pkhash = get_hash(file) - - data_path = os.path.join(getattr(settings, 'DRYRUN_ROOT'), f'data_{pkhash}.zip') - with open(data_path, 'wb') as data_file: - data_file.write(file.open().read()) - - data_sample_files.append({ - 'pkhash': pkhash, - 'filepath': data_path, - }) - - task, msg = self.dryrun_task(data_sample_files, data_manager_keys) - except Exception as e: - return Response({'message': f'Could not launch data creation with dry-run on this instance: {str(e)}'}, - status=status.HTTP_400_BAD_REQUEST) - else: - return Response({'id': task.id, 'message': msg}, - status=status.HTTP_202_ACCEPTED) - - # create on ledger + db - ledger_data = {'test_only': test_only, - 'data_manager_keys': data_manager_keys} - try: - data, st = 
self.commit(serializer, ledger_data, many) - except LedgerException as e: - return Response({'message': e.data}, status=e.st) - except Exception as e: - return Response({'message': str(e)}, status=status.HTTP_400_BAD_REQUEST) - else: - headers = self.get_success_headers(data) - return Response(data, status=st, headers=headers) + return Response(data, status=st, headers=headers) @action(methods=['post'], detail=False) def bulk_update(self, request): From f9d59c299115935129a847792bb3c5ddb6f08bf8 Mon Sep 17 00:00:00 2001 From: GuillaumeCisco Date: Tue, 16 Apr 2019 11:48:47 +0200 Subject: [PATCH 038/106] clean --- substrabac/substrapp/views/datasample.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index 0437eed18..81893ed79 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -259,7 +259,7 @@ def handle_dryrun(self, request, data_manager_keys): except Exception as e: return Exception(f'Could not launch data creation with dry-run on this instance: {str(e)}') else: - return {'id': task.id, 'message': msg}, status.HTTP_202_ACCEPTED, {} + return {'id': task.id, 'message': msg}, status.HTTP_202_ACCEPTED def _create(self, request, data_manager_keys, test_only, dryrun): @@ -290,18 +290,15 @@ def _create(self, request, data_manager_keys, test_only, dryrun): ledger_data = {'test_only': test_only, 'data_manager_keys': data_manager_keys} data, st = self.commit(serializer, ledger_data, many) - headers = self.get_success_headers(data) - return data, st, headers + return data, st def create(self, request, *args, **kwargs): - data = request.data - - dryrun = data.get('dryrun', False) - test_only = data.get('test_only', False) - data_manager_keys = data.getlist('data_manager_keys') + dryrun = request.data.get('dryrun', False) + test_only = request.data.get('test_only', False) + data_manager_keys = request.data.getlist('data_manager_keys', []) try: - data, st, headers = self._create(request, data_manager_keys, test_only, dryrun) + data, st = self._create(request, data_manager_keys, test_only, dryrun) except InvalidException as e: return Response({'message': e.data, 'pkhash': e.pkhash}, status=e.st) except LedgerException as e: @@ -309,6 +306,7 @@ def create(self, request, *args, **kwargs): except Exception as e: return Response({'message': str(e)}, status=status.HTTP_400_BAD_REQUEST) else: + headers = self.get_success_headers(data) return Response(data, status=st, headers=headers) @action(methods=['post'], detail=False) From f0b2c7512f2660f5e2270ea4a91b6df6d25efbeb Mon Sep 17 00:00:00 2001 From: GuillaumeCisco Date: Tue, 16 Apr 2019 17:39:01 +0200 Subject: [PATCH 039/106] fix InvalidException --- substrabac/substrapp/views/datasample.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index 81893ed79..fe2562066 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -46,7 +46,7 @@ def __init__(self, data, pkhash, st): self.data = data self.pkhash = pkhash self.st = st - super(LedgerException).__init__() + super(InvalidException).__init__() @app.task(bind=True, ignore_result=False) From 72fd5adb70cdf7c08f7c50d0547921e7dcbdd690 Mon Sep 17 00:00:00 2001 From: Samuel Date: Wed, 17 Apr 2019 09:47:39 +0200 Subject: [PATCH 040/106] rename InvalidException to ValidationException --- 
substrabac/substrapp/views/datasample.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index fe2562066..cd0850ce8 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -41,12 +41,12 @@ def __init__(self, data, st): super(LedgerException).__init__() -class InvalidException(Exception): +class ValidationException(Exception): def __init__(self, data, pkhash, st): self.data = data self.pkhash = pkhash self.st = st - super(InvalidException).__init__() + super(ValidationException).__init__() @app.task(bind=True, ignore_result=False) @@ -281,7 +281,7 @@ def _create(self, request, data_manager_keys, test_only, dryrun): st = status.HTTP_400_BAD_REQUEST if find_primary_key_error(e): st = status.HTTP_409_CONFLICT - raise InvalidException(e.args, pkhashes, st) + raise ValidationException(e.args, pkhashes, st) else: if dryrun: return self.handle_dryrun(request, data_manager_keys) @@ -299,7 +299,7 @@ def create(self, request, *args, **kwargs): try: data, st = self._create(request, data_manager_keys, test_only, dryrun) - except InvalidException as e: + except ValidationException as e: return Response({'message': e.data, 'pkhash': e.pkhash}, status=e.st) except LedgerException as e: return Response({'message': e.data}, status=e.st) From b6a3d5de672fc9a4dc0df96cedadcb2f9321a977 Mon Sep 17 00:00:00 2001 From: Samuel Date: Wed, 17 Apr 2019 09:58:31 +0200 Subject: [PATCH 041/106] don't parse twice input request --- substrabac/substrapp/views/datasample.py | 78 ++++++++++-------------- 1 file changed, 33 insertions(+), 45 deletions(-) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index cd0850ce8..5b93b62a8 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -22,7 +22,7 @@ from substrapp.serializers import DataSampleSerializer, LedgerDataSampleSerializer from substrapp.serializers.ledger.datasample.util import updateLedgerDataSample from substrapp.serializers.ledger.datasample.tasks import updateLedgerDataSampleAsync -from substrapp.utils import get_hash, uncompress_path, get_dir_hash +from substrapp.utils import uncompress_path, get_dir_hash from substrapp.tasks import build_subtuple_folders, remove_subtuple_materials from substrapp.views.utils import find_primary_key_error @@ -186,18 +186,21 @@ def commit(serializer, ledger_data, many): return serializer.data, st def compute_data(self, request): - data = [] + data = {} # files, should be archive for k, file in request.FILES.items(): pkhash = get_dir_hash(file) # can raise # check pkhash does not belong to the list - for x in data: - if pkhash == x['pkhash']: - raise Exception(f'Your data sample archives contain same files leading to same pkhash, please review the content of your achives. Archives {file} and {x["file"]} are the same') - data.append({ + try: + existing = data[pkhash] + except KeyError: + pass + else: + raise Exception(f'Your data sample archives contain same files leading to same pkhash, please review the content of your achives. 
Archives {file} and {existing["file"]} are the same') + data[pkhash] = { 'pkhash': pkhash, 'file': file - }) + } # path/paths case path = request.POST.get('path', None) @@ -211,48 +214,33 @@ def compute_data(self, request): # paths, should be directories for path in paths: - if os.path.isdir(path): - pkhash = dirhash(path, 'sha256') - data.append({ - 'pkhash': pkhash, - 'path': normpath(path) - }) - else: + if not os.path.isdir(path): raise Exception(f'One of your paths does not exist, is not a directory or is not an absolute path: {path}') + pkhash = dirhash(path, 'sha256') + try: + existing = data[pkhash] + except KeyError: + pass + else: + # existing can be a dict with a field path or file + raise Exception(f'Your data sample directory contain same files leading to same pkhash. Invalid path: {path}.') - return data - - def handle_dryrun(self, request, data_manager_keys): - data = [] - for k, file in request.FILES.items(): - pkhash = get_hash(file) - - # write on DRYRUN_ROOT - file_path = os.path.join(getattr(settings, 'DRYRUN_ROOT'), - f'data_{pkhash}.zip') - with open(file_path, 'wb') as data_file: - data_file.write(file.open().read()) - - data.append({ + data[pkhash] = { 'pkhash': pkhash, - 'file': file_path, - }) - - # path/paths case - path = request.POST.get('path', None) - paths = request.POST.getlist('paths', []) + 'path': normpath(path) + } - if path is not None: - paths = [path] + return list(data.values()) - # paths, should be directories - for path in paths: - if os.path.isdir(path): - pkhash = dirhash(path, 'sha256') - data.append({ - 'pkhash': pkhash, - 'path': normpath(path) - }) + def handle_dryrun(self, data, data_manager_keys): + # write uploaded file to disk + for d in data: + pkhash = d['pkhash'] + if 'file' in d: + file_path = os.path.join(getattr(settings, 'DRYRUN_ROOT'), + f'data_{pkhash}.zip') + with open(file_path, 'wb') as f: + f.write(d['file'].open().read()) try: task, msg = self.dryrun_task(data, data_manager_keys) @@ -284,7 +272,7 @@ def _create(self, request, data_manager_keys, test_only, dryrun): raise ValidationException(e.args, pkhashes, st) else: if dryrun: - return self.handle_dryrun(request, data_manager_keys) + return self.handle_dryrun(computed_data, data_manager_keys) # create on ledger + db ledger_data = {'test_only': test_only, From efb7af9665eaa7dd9ec4ec95d3a325de599908b3 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 23 Apr 2019 11:52:37 +0200 Subject: [PATCH 042/106] update populate to use register --- docker/start.py | 3 ++ substrabac/populate.py | 69 ++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 70 insertions(+), 2 deletions(-) diff --git a/docker/start.py b/docker/start.py index 66443899d..33dad9702 100644 --- a/docker/start.py +++ b/docker/start.py @@ -85,6 +85,7 @@ def generate_docker_compose_file(conf, launch_settings): f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}", f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': ['/substra/medias:/substra/medias', + '/substra/servermedias:/substra/servermedias', '/substra/dryrun:/substra/dryrun', '/substra/static:/usr/src/app/substrabac/statics', f'/substra/conf/{org_name}:/substra/conf/{org_name}', @@ -143,6 +144,7 @@ def generate_docker_compose_file(conf, launch_settings): f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': ['/var/run/docker.sock:/var/run/docker.sock', '/substra/medias:/substra/medias', + '/substra/servermedias:/substra/servermedias', f'/substra/conf/{org_name}:/substra/conf/{org_name}', 
f'/substra/data/orgs/{orderer}/ca-cert.pem:/substra/data/orgs/{orderer}/ca-cert.pem', f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem', @@ -172,6 +174,7 @@ def generate_docker_compose_file(conf, launch_settings): f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': ['/var/run/docker.sock:/var/run/docker.sock', '/substra/medias:/substra/medias', + '/substra/servermedias:/substra/servermedias', '/substra/dryrun:/substra/dryrun', f'/substra/conf/{org_name}:/substra/conf/{org_name}', f'/substra/data/orgs/{orderer}/ca-cert.pem:/substra/data/orgs/{orderer}/ca-cert.pem', diff --git a/substrabac/populate.py b/substrabac/populate.py index f19889e4d..03c0515a6 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -2,6 +2,7 @@ import functools import os import json +import shutil import time import substra_sdk_py as substra @@ -9,6 +10,7 @@ from termcolor import colored dir_path = os.path.dirname(os.path.realpath(__file__)) +server_path = '/substra/servermedias' client = substra.Client() @@ -75,6 +77,59 @@ def create_asset(data, profile, asset, dryrun=False): else: print(colored(e, 'red')) + try: + error = e.response.json() + except Exception: + pass + else: + print(colored(error, 'red')) + else: + print(colored(json.dumps(r, indent=2), 'green')) + return [x['pkhash'] for x in r] if isinstance(r, list) else r['pkhash'] + + +def register_asset(data, profile, asset, dryrun=False): + client.set_config(profile) + + if dryrun: + print('dryrun') + try: + r = client.register(asset, data, dryrun=True) + except substra.exceptions.HTTPError as e: + print(colored(e, 'red')) + else: + print(colored(json.dumps(r, indent=2), 'magenta')) + + print('real') + try: + r = client.register(asset, data) + except substra.exceptions.HTTPError as e: + if e.response.status_code == 408: + # retry until success in case of timeout + print(colored('got a 408, will test to get if from ledger', 'grey')) + r = e.response.json() + print(colored(json.dumps(r, indent=2), 'blue')) + results = r['pkhash'] if 'pkhash' in r else r['message'].get('pkhash') + + keys_to_check = results if isinstance(results, list) else [results] + for k in keys_to_check: + retry_until_success(client.get)(asset, k) + + return results + + elif e.response.status_code == 409: + r = e.response.json() + print(colored(json.dumps(r, indent=2), 'cyan')) + return [x['pkhash'] for x in r] if isinstance(r, list) else r['pkhash'] + + else: + print(colored(e, 'red')) + try: + error = e.response.json() + except Exception: + pass + else: + print(colored(error, 'red')) else: print(colored(json.dumps(r, indent=2), 'green')) return [x['pkhash'] for x in r] if isinstance(r, list) else r['pkhash'] @@ -125,17 +180,27 @@ def update_datamanager(data_manager_key, data, profile): train_data_sample_keys = [] if data_manager_org1_key: print(f'register train data on datamanager {org_1} (will take datamanager creator as worker)') - data = { 'paths': [ os.path.join(dir_path, './fixtures/chunantes/datasamples/train/0024306.zip'), - os.path.join(dir_path, './fixtures/chunantes/datasamples/train/0024308') ], 'data_manager_keys': [data_manager_org1_key], 'test_only': False, } train_data_sample_keys = create_asset(data, org_1, 'data_sample', True) + print(f'register train data (from server) on datamanager {org_1} (will take datamanager creator as worker)') + shutil.copytree(os.path.join(dir_path, './fixtures/chunantes/datasamples/train/0024308'), + os.path.join(server_path, './fixtures/chunantes/datasamples/train/0024308')) + data = { + 
'paths': [ + os.path.join(server_path, './fixtures/chunantes/datasamples/train/0024308') + ], + 'data_manager_keys': [data_manager_org1_key], + 'test_only': False, + } + train_data_sample_keys = register_asset(data, org_1, 'data_sample', True) + #################################################### print(f'create datamanager, test data and objective on {org_0}') From 961b3f83033e0d61d87c13d2df7ed0d21189f49b Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 23 Apr 2019 12:03:50 +0200 Subject: [PATCH 043/106] add more debugging output --- substrabac/populate.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index 03c0515a6..171173782 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -80,7 +80,7 @@ def create_asset(data, profile, asset, dryrun=False): try: error = e.response.json() except Exception: - pass + error = e.response else: print(colored(error, 'red')) else: @@ -127,7 +127,7 @@ def register_asset(data, profile, asset, dryrun=False): try: error = e.response.json() except Exception: - pass + error = e.response else: print(colored(error, 'red')) else: @@ -190,8 +190,11 @@ def update_datamanager(data_manager_key, data, profile): train_data_sample_keys = create_asset(data, org_1, 'data_sample', True) print(f'register train data (from server) on datamanager {org_1} (will take datamanager creator as worker)') - shutil.copytree(os.path.join(dir_path, './fixtures/chunantes/datasamples/train/0024308'), - os.path.join(server_path, './fixtures/chunantes/datasamples/train/0024308')) + try: + shutil.copytree(os.path.join(dir_path, './fixtures/chunantes/datasamples/train/0024308'), + os.path.join(server_path, './fixtures/chunantes/datasamples/train/0024308')) + except FileExistsError: + pass data = { 'paths': [ os.path.join(server_path, './fixtures/chunantes/datasamples/train/0024308') From 366b5e9729d97e67c23a0a9d007de4c35896add7 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 23 Apr 2019 14:30:17 +0200 Subject: [PATCH 044/106] update to populate to do a data_sample register call --- .../{0024306.zip => 0024306/IMG_0024306.jpg} | Bin 280069 -> 279172 bytes .../train/0024306/LABEL_0024306.csv | 1 + .../{0024307.zip => 0024307/IMG_0024307.jpg} | Bin 274696 -> 274405 bytes .../train/0024307/LABEL_0024307.csv | 1 + substrabac/populate.py | 27 ++++++------------ 5 files changed, 11 insertions(+), 18 deletions(-) rename substrabac/fixtures/chunantes/datasamples/train/{0024306.zip => 0024306/IMG_0024306.jpg} (91%) mode change 100644 => 100755 create mode 100755 substrabac/fixtures/chunantes/datasamples/train/0024306/LABEL_0024306.csv rename substrabac/fixtures/chunantes/datasamples/train/{0024307.zip => 0024307/IMG_0024307.jpg} (91%) mode change 100644 => 100755 create mode 100755 substrabac/fixtures/chunantes/datasamples/train/0024307/LABEL_0024307.csv diff --git a/substrabac/fixtures/chunantes/datasamples/train/0024306.zip b/substrabac/fixtures/chunantes/datasamples/train/0024306/IMG_0024306.jpg old mode 100644 new mode 100755 similarity index 91% rename from substrabac/fixtures/chunantes/datasamples/train/0024306.zip rename to substrabac/fixtures/chunantes/datasamples/train/0024306/IMG_0024306.jpg index da6e938922a8a005d1fa73180f9df1fc442a2fdf..6ca016f4736fa3cb5c61a7358574795ea73de366 GIT binary patch delta 16827 zcmbq)Rajg>v+dv(2oT)eb%IN98Qk51JHeg7ZD4Q+9^9S5H3WAjNbuk!xWmbR&bbfw z_1=EiFTHD3@7-VZS8G*uL>x|d1P=IZ?QMnFV?heHLR{hJoY2EfC^!y&>UARr(Dkl^7l 
z;NTH(5OJviNFQjpk%1C;wCX65pYcCxxT4ZYnTHVYB)JtdSTsp%YP)-Y8)yGPkih}| z#{t~`@c@s2hy({fM){8eDI7Qe{D069;1QAjJ^teWhZ+Hw8z7;MNb~sv&{Z;oRwJnZ z$=t1>AMa!6?Dk)&-KB_nU+{R@sDqO(-1J0tL`@ybOcC+z{aXrsH~ zX`MZ9DtYXLSieXsrT`UmIi$6+!es7T4l(3i6mq1;)qpU_wMg&Up!nMGnKK6PF09t) zmn}Vhi7UcfR)@X;Zj8^1pxFLBth;0L!|QmKX!wk_J6M{QM&5>ANNTU!-f@W_G4F#~ z5{FK&<}iGequIfEnR6~ED?3>4W;BEhh?RxX z7f-loyxwXL3SvwqMKCYQjjihA%Qm}#VJ!gkFw zkdLD7M9MKPj*ojlLD$j1b-8$NWG>Z zKENKelIjY~?D?Yiu*J&yeA)y zy0Kp+pbEUin!Q_m=Q6*~rYuygF-!{&(pd>eGFw@@d;^v9-}>;gCuGl6knmkR>*=5Z|k1;mmX>w+<* zYO%^6nzrG@p6Kk_D8BAuJ!)r2sq#)8Hx)B<33AJ z!8z57q04DW85+t5&J_f%8u(k#tdY;Hvi_PT+fUFm)KpICJ+^cf_JRx(KU=;5mfqFw zeTDvDQvr)xJ+WmXwW}4SZQD8u*fZ|U?H})H<<9rGSPQ~WZlnUYcG`YNh8)vOwM)I=@{o7-f>Y>^Y7v z3K^RYQj)4Tl_SA9PIBJf`BX8XLNf9KKCWV-_fZr)x= z6-Lsj)ja1@t^o+FK~5~@u(Msm1qMzLa%N0?YsN$Fk8rP_EKRaz)A@9B7rnFlr)YLX zjlfYIKc*^gF!&0`fu~ybTIA4VoCn54iDzg{Ug^Nm5Bjf-@ZYn?n4@S1Us+4W=W%1R z6P+w&fl^&RwTB*x@YeH4Pu9OTu04DR?yp_=$wZP+kKyFum^508YMitgdTcn)(`+Uj zul~1NnNyW)!77hvBvs|vOTB~NIpVIY&Kf+m;e(FfSA}BiE%Zb_8SG+E2YJ4(58*e5 z^Wkqx&6@?f-^tgiYS3|8>mt>31^uK(2ErHjt{XmcB9e%I_Z)27OO%FgTh;{7oOdBL+;G z$b?kF`UY^4iD@KKH#jC9FjbB#sU252Ym!f2f0-Kaa>!4)@g%II$1#5yZm;ul9`4=Qh_@v~yC zRY8Fu!jydHAaikO^&#Q1nJDs)Fz_&@9MmbQMvZU7Pk-Z?nEhwFJp(D3~!07)3ri z#CBQWLqq-4Q(IPINBQ}n%eIXgnz)X2LYW0tW+?|L(f4q99)13yK4Bvwg@abkXMfu--TYzf73Y|#(GlC*MzSq#;d zB-p%Ap5zG6VCYpxLMBbeXke!AM`%*24qlfV0z+$qbv&XTl+_J$^yGa;VR$q0J!+>r zl}$5o!7-$6MxchI7-vPe6YHUBK}pOC+Do)U7%dfp-DC7`2437<-|AS(H2deU!td@i z^2d^v_C-8MXY7ru*2iu8Wza33GTZMJ@d%ZboO?4Zyf4AHXtc@SqrjrN1r=qLXOF_4 zymgKTh)I$&VxI#%Ms4L3N;-gpPf6HfS*NdsfBB7r&ao#+07NenD2$;4=Vrxlk|BiU z&(B!R&D+N5U^w!HoX~SO}JW1B~IU|2Sy*I4DX`Tiq)3B}vz^ zWg*-FCkFJMuh3p^^oTfG%E+sWa1T}%R@zm&mhsBqYw@C^!)-^OsG2U@9gS}QGD1rd#2g48 zQ&)|zgA!C#zO;P0V{0(m_#A!p+4EvDU6Yk;QU2dIISP8xw63_R|KV)s`-% zdV1W+a-rD>gJtVYhg|S}pQjK^gCuccn2N~Gx8Ju>dVy^-g;)v9UIc~NDl9ZgjD|*x zwMYs0)JJT#O}3h&UW$UJCsxG;rNu21l<37BbQ~JxO(v^iG+U6*0#J zhEOnDb=(_|Lf712Ub71uon02zkauInRgL9hZIYXD>O7$>)VAC`Y@QpKwvXleTG7?K z^rfxC&2jurzY}AZtV)TKiZgh+>~vKbZ94 zV#YFNfs?}VHY(-I6H^o1$dXeHpT{{=97^m4!!?wiwiXi6z4aBMn9o|9SdQ1-S=R=t zn8o!vLd=p?Djio$H3zV@Na}~{unxFZE0NH1giAoNy{%E9XSj$?sgWEO^jW5WOxED_ zaUMLlnThr+?0Mm#RrU%N;+4e8L2VZcQ@foA@EZW7{6(n*4QZmIyWvxQ=sv^z)$Xv8 z$6orBrTC=8d%;jTX}%4l(E9=c4<$gN9OJn^YbOh0TfbU*=LgelwXFbv-}u79+N&S1 zVO94>2?H-BOg`gI6jV{cP^nf>D6ON6`AglGt+B1+2z>kos1@d%`?o$0Lh~ajIbuS}{9TWXfi%RzI@OczMWqEV9g%N}j|qq0_~C^&@5RoKbt$G*DYY&6 z?-{q!F}I+B@BY>E`s=?y*m-|EeVk_)NczG9`S_vbKrqy(jllHA z!=~uTP%ufEA3i)JaZeIYv^`6=)P2m{>gr`b&NbEP$buGoGY=H1HK%;xlp?hNgp0ObcT_K*VR|}MNg2ZfNU~$b+jz26S{dw zODmOmohVtQqS770YNl(S#nb%(xR5h39y=bg>If;yjluZbhe$zq#xO$Ytvj2qykU^M zxV;v<4Xg?K)uf!;k$vQUrz=vh8COgC=K$%tb-fW-ktAUy1pf(uhaY2oYt>Z0kd2aF z`_8Xtdp_ZBu`w$%t?76-QFkl4;Qu?m-unz_U2C3?v<_IVCqwaHtTzM zUTHOq_X%!es+HNK==X;_%`l$ssm00-^|WVoR`%L^pZ=^O%*$0E4dHF$M!pNj@`ky( zQK3QP)I>77g#0CnqVSyV}kQ;v=or zjneICc>FLXOk6?P^!x62;1fDadBPxUCqoAZm!TizQLQ2*jS{sb88sP;Yee5^$2$xY zSq{xn600*dj<|0~s^S~)r<}Gpc}(242A9E8iELG!vNaXomiVY4w0?6?xZhRVw!wad zCWRhc^Ybr>qOB$IwFYRMjiwvdJkzC=W+Rg>>T@Co_?>V;a!96i(yW(A@6~-gH zvhiv(*%o6zsg^0B3CAybKLrl|z5&p==&#z7I1hD5k&Z}`lEG^h6QNv?q?7|^mbJjL z+xVG@YE5l==PpP52}_*&jw-C69RU$t)w!qz7I$kMCv%wT{m!#Ta`UlD3|r5B>0h0B z%+&3o&Kc0|xz#E@cuC68{$Uz+o~>3-cyZz|a*ohr zGP|v68NMeA``c_1@~V|EtU8xUB!k9Q9@C>3IS`e0W!xF1IWk2{z?*U(H_^<#|IYiu zM*Fv$r-u+fc!0e?M>v*E0mc$6&W9ArF_^gKmQ*2XeoQ|KoxcoeB_e9mK;4ME2%{p8Kr`l zn#=W9EM%kS=@gZgSgO_d&!wqHRge~LXpBObHUJto16H?@B$nQoI6RWDJ2)*?UeN{j}=)l*Q`ke z@;^7Eac=rP-#4|mw^PP0nm+x+s-~uyl`V({lQ|ixyBS`bDlF?in(2$Kx@yTg3y#wK z#N5OE`l|3R(YZnAlf>j#pF+GHT}`Umxi3DbdiW5%HK>gY%n|US*24H1{jmSl?yqC) 
zy>s3a$rsVphe`Crz-H`*sDmH>Ne{?Iz;jR+WGL)Snh2 z09V1k*j|W3m9K@k=5;g|7KFLm&3?ap8{16tt-i+GX(jyOU3z*D0e{|e${$r>ojQy> zQHgp_9mZ`8NGBg4QpY?n6VK=4T`hEecfL@^2 zAHTniLuZS3gm;UM4Uh$~X%xyu5r3%hnP!~xfpw+-nLQvLaoz4(a4jXLr+9e+R4HYty zQ;Nws113r_>xIa0zytFSoDJH*fcnqktwxJ+ULZ3`ZK*Qqv6e{n&S&51tc#&+;)WH| zb)ZjWp(=4@|1=~Ie_-kjFrKupSAsKEYk_XAfmJQn#P&@q{o7w3+LQ!QSxs7^YL(fr z_T74ft6|oaBMoxlR9b>lzD9sRfo-im`*Q3!vf^z+bO1XU$85OvFcJxv5xxA|Tsc*U zWOtyK8Qfl3O|g_E$Zj-fvkbL2lZVRe^!LNQpKSBXC5ukvW0DcfmjJ^2Qhi83=@r!{ zZ2z_E&@#HimN==e5k%!k^s`a+`?M&hirYd=3A2;?*SMY!o&+UMF$ZY#^vIvK$2YBK zopXz;2(8cp4f83)lUO>zofVRvwEC!T06Ktt=}W)oAdctahik{{XqSkn^$gxY!`GF8 zzt->7i55!Ft+4OHrJrNY3s3!b^XjDO;%TF2aDluH=ITqSp<2lAJn>4uk|K}Rrodfa zS`Zrxl%Gw0!7-e~>4rhwZYaxgXhr9}MM%HOKIQc}oBTpEA^H6X=8r}E7+Dyo8x@#o zJn|02!yZOJhfuyWzjsqthVsFRMr5RjGutfyo^nESF?l(QnVCde_7JD^`20@}QSrw- zC=ZmLJGmVFG%lRG8H0RQty&KN0<4toQPhSNQKc8tyOy%D*L2bI1$=ZA)TJpZSM*5# zoaz*3Fucmg@iAW%e1Qz|MUA&KpEFXD%#S=U8$($$Bc*u*Q1l`zFf6If+u!2mM8<85 z)#$!65wCsW_!y_Juv?DykFcj2Om4v`oUi`EIBsrZt@z-x0<={+*PrSu!dr+02YVo@ ziBLk@#U2CYo9#hoa9$hMxRl;QXNwUWdDWf=En)+?qA?6$GCoWeB%-QLzZ4{CwPt%V zq;C>mXyJ##`GqmW=ppcZVGYN6wR=q}z`|rup7W`aP5#jwV+D`yO4r$Z`V9BEoRNa=2lYrsUt zDbZueAp~yZXqAa05$Lg^#shc_b|!dal|?sOp*P8{kLXpo_ue_buiyLIhpk&hdz1?V z#__fcV-U$gUVz+&bvJJ6#_v2yF+o6W=$TSbwr!&@c(P@t-$nb<00pP2pOov8-BGymH3gUNfbZ(c6TDf<9a*YcDqSM;dkV4lm$<-oZCgZ9 z3`rVh5@q8oORob@c;f0$hCf$E6eTx8vV{xRvKf}nE~lQ$^>I17AJ(#MwygAauHXz!Nwqk;P(2q~UJ#N+ibz99UV)z6ZCJUHUx# zrCGQkaH3{g*G}yMvn4%n#);Y=b*3pZ``glcLtpoV!M(aqjzLNl#ORfkP07dGgn9X{ zbdyk`bPTiKQq{o(iw?T*sp$+cl1p$6C*Hj!q6%%tBtG`SUI!^oJ7MHqwu`L3K$z6+ZOL>va#hMjZ%wQd5 z%>)Zhfg3SiI!})1$$1#E-?uBuO?MjN&39(XBZ-ZAO`9A9rZRT&Vwoh@ zLyT8Q+(0w`#Z_@eQpPxV!;?v`z$L;RZh=?(G*4c=M`J48u&JsdzuKFYj1ZI9Np8B<|ELtfE22Wa$)1?7JPgUS8f!{pgCVT_XTGZjvdMVHvR$EeV+Wl_x6LbIxpy4&97V1OkeTKvM_4ZnlDl3zB z%}!D6*+&quP#Ybuy!c4Ja8}%S?EE?uYB)e2egpi-ZD;+uTYSQ_Ie`1#xa!1k(kTWwLHOqlC2jF}*~bDCp=YM8+?Bca~jm%lw%q~Y9KwWiQTAUgE+^T z9(NqgWM_C-&}3PWCL2FRu9BI&+h{+@7!A=^Pw!h^`6)H{4Mp=5?e&-slX9f;-KT8& z3PB_$*0kMmyT2L3&08b5p!ImVI`*GbYY~Pbx}qF+*mNt&hO4Q#U{7N$;NuL8B=@Tu zlAF&7|AL(}rK3|_fR_Tp&J@}6a`H*lb*}vTPNk|w{`E`4mp^=Ic~Pf@cseCIWxA`o z>aE){1OPQ*gufDvf(f!p2HBlS5&BY-h`RawV>V?-z#Km1SKZG}LD`VOJ=g5xA9>_A z)*wRVMMZnt54YS@;6!mif;lR^eN~N`>gDVfe5NKfy0sAWw8qKKDyQAp<8E?+ zW47X>ft=x=U{CpWde&2~KgJFD&m4h-jE;{kj3*R-DPJ5;_yd;m>@T4U?YN3ir}(M^ zRl1*Z*nwd(!{q7(M1FyY39xhr4TB`os=#^aH5+80kK2tWucxTQ^$oea#M)yCds3f< zD{F~w5YwmV^Y3V1hdTky(!s%n-1d#|Sc{(J!szeh!RSTBFFp#Mt5 zudS-bT9$H34IpfOca@)?Sz2cIW3Nj5lAw?2@6KXN-7EyA<8&Q8z&GHsp%fk41zzNO z8DuMK@C^Jn@_15202*@nG0Ee{IeMP0UXL9!1r{k)uvVSqvw$D3YctM zBc0U%dqi&Ey|EMv)7}hQAL((%lIo)U&>u(9&LU<<#hv1IY**c|s}G0Una6;Z2n9k2 z_~}dLlyADfs4ZpXN=i#PzFs3=IE!f5pw1dgerIkALQVMz7sPFSh`|b}EP*V9IDO1;k=J`Dw3T0i@pee9-=1QzMpGq%m zz#1ov@AtZOOJ>7qYXoI+fH*{r3Ct}oU4gSz+bk#hb)~PFuXX=4Gyh|}=|2zPz`p{00R$Hes$sGpa{XLJo@FgMlU{7)OzFhvz}!_Psz@}x#%`H2 zkE0O0c}#z`xtrZuS8!oQ6dTh{vWi=7d!^vHI`{?{Ss^ucnAWvZ0Dj73S`e zS8{MQux?q({rR8x47*K^cHeXG66jq$?V^M7wajVKy#KloCVOj06{Z9(*7(`MFswDM zY2T27UcX7lMPBn~?EwF$dL+N0B(o>aL3oM3wzmVcJ4H-R#!8o?gRe-NCMFaIu8HB}B(2B2Hc&{_ler@wZ`z@zu3s z;`MPjcxa`1zXo{@$@W)xFv^Goi=+GKan7-1%Q)VS{Htl1so)zJke6BPBBk z`|BsXw-N)VC?HQDcoY1A0AT1M4KLn#uc6w{KP^U%{+*VxjQ);qJ!7y@mVXT?=tUGl z!&dD7wXWqVfZ}9I-wcnS`ncMc6NbbgNEwcl+SsH|7i>S{9sqZVqW7yz!$5;?GTi=7 znQk!SVZVKR*o7SYn5_$%cYY6JZlun5o|}HgcQLW~E$#{*nM9X(92@%->(8UR=eQ|? 
zD}#=ae6t%Vr zXQzLJ3UnSe1E=!o*X?z!;fIPAZ^Z0#qwRadQOsFuL`7W+&o}vjxc8+b!Z2RNR$4fz!f`ZQ@e)Xg&+&1_;5*6|}|){Bm2*T7E^UPQpKt zgsf`q0++x%`_n-IH_`rHIXh)E3fA>!yOhYE{OYpV%=wISI#{{uW%~f?oWZ510T)WV z&(}haS;;RT3qzawbQGKfv2*vp#5oHMRzuvp4+%q_Trr7lf-*x$e`;yFB(|3gozFJZ({=E(P*cDX%3;Wd~Y+0 zHkQ5~Ayovc4(vJkM>=K!kHXdBQ?cyi+|U>0k5Tu&TYInF%FGhU95a=)#A{Hi^!91N zi$M9i0^2+xIMLYrbv$`Q46#I1ME?(e1z34SHz0KJ7dcNh8e^`|Ph3kH)VSF6)9uKC zwXI-oiMnUgya=Va{!UJ|IOG_6y$C>kkFQI_uGQ8uZCT3ospw;pCX1>K!J?*@#p-S^ z|3XtZqgxWWpwpD1iT{(~dD>dV*61g0i8Ovbv_d^Drfp+yvp#!C*bEiPLHQl`LJ9&X zB9^r6A@RIv`teV#ps{na+sHUU%o6nfzq(FOtHGXW*2gezeer`o*cn8_e2 z%E5et!-`}qs|xnsxQOwbt>;`@yCExeLV`rCGD`1*-B+Pd*+?0SeGQPBcF|6pg|ig) zHt4>dh=dHQNmR9*rt?r?!luO9XF8~V?V!_Zil4LoA!%R|-Dbbz;W#yqFp>c%4W8>H zf_1C*(Xk|0)qwE&DB!kk5MLwf+VdCU(Ty+fYRUvA!=~QlJx6XuW1a_J&MbRBaxw_* z$X?1Pmt=m1${?j!I7Tg0l0G9yM({Q;ZgX>zeDT=*`fH92@`ss#k*Mm+)=6>nxxTc| z5L4A}t%v(*LTHpX+sYDlwUqom2+U3%d^Gh8&uknT@l>OX{rxf9R4r~tBK`aplRr`@ zOgHMRNig1XgFC0PfYc{R53ENfEx28+O2`T|uVVYUy8RP7pKwxWXwFD_Y@2v_=0 zq__b3ltW!PB}>d!=xxYwMoC;NY-S1HX>T>sHmgrC3vTs0$|9;|g#+QVfz?RH;d!-9 zUqv@fFUA#nZGan8Ms-Na}dnL^n01i zgOy)?*KD~@0K>UOr=%z6}E&5v`N-BwNb2JwkkvQ zriru&2X~%s&%Fl;5FQ7 zUriEM3a!rFh$*LfDM0v+EZiU zUy2wG-! z57#d{)n@XXht)s?)q@+r*GXftt1Z{X^CF#@W$uoW9+ocm>@zu|pgg5d+eBDR&Bc>R z*wh>P+8$;nBUI?d;K-o?W6zWtQDf$CURsuk0^T^I2c&6YoXTNQK^p=8WGWsyF?&Q; z#bM5C()G?3si~ucC=VCuCuQ4MiG`^Rfnkjc z(>f>VN?+5JhIweFX0igKorHx;^sFgQ-Ib~OZ2;(RSc^*)*m2%@wh|u~h>yWUSAWCC z60>dsoBWjJc71i%H)(W03}Wo4EefFAUnSyf^NX2s49Z&>Y>Ym}&(UDcz~p_r_p=zx zv7RImzp9rbWrO~ZP9|JOW`RHMBY61f06Edn^ki>0j|Qa2Ynyd6iMG8XxA3cQkd;gY z3gj{pj15+SIaW)1&i$Tcb4|yA!0^ikcWo5-waKqqS!Acjq13HOQ$$GpwRAv+K*a$)e zYCLA+pVJ9=L4N4*HWM^*Sgrm*mR zw9YjF-06DOw%1%LSkUY8^@*Hs8Voi>48$( zg@W0b_%4Q#sZ~dc1{cdea%TxMK8b`B^@r6_=%)Q53J$%Y0zC1wPCA!U%Dj_5mZdu6UZ?z%- z4;5xi(m8W_S89MrQgvj%0lWhTKb+Ly3Tz=W#Pklj^OcomvR`rX(Hw<-amm}B;-1_; zYRaoN+rpZ@z;J47G?x(=kK+a&Tv^JBYn0-iVps5=m~r(67jL*)_ls@gTHvQX_^2g8 z`GqW;+S=KLRSM$1jg|gdtMe&f9a?n;pEMX*c2#>bDHV|LFeq6kDa|JGAr08u#fOn& zuO|}JJ`UduFQ)$%E|Tg{OJ5X-vuMk4A6tIDfgLfH4n+@ms!vAU02i+&%}AKXiL%Mj z-7KQ1<$U@ioX;y9o%`6I=hgV+^o3?wBKNmaxhabTj~*Fa;5yH&nmCB2=3HcuFjz`cLzCP(e>sJNCne*3j{^5T6>(H z@b^%XNozS(GDwA$DEUX|{COjA^#MRKNjtCe5fR|5M71EyR}jr{*;4>&!Bnu-sQm`d zS@ShzPIf}t;b$kA(G|9!y80xTfA05SD$YAv5=QznC$jz{ ze+Gs-(dpi2n|KzgGM42Pq}h$+nV;i++L<`7)!0uyloI98z(_11RgtVlLze1oY1Paj z_ezmo6J#@uh>tOF8P&YQtuy0XXSJe)R`c5-77&fGxnXlqC+|Zr)LVXmH&`sKaBpzA zW5eAUYVT0fvMRqXLu2H!8jv(rkH1lq;T_mFN-iT;4gfe)W~B`LlI>mRh0lqddx7)x zt&?p6z*Own$iv3G{qCNjOW~&Px5};FJCCp^E_e2zoz4chekwR6eBMuE+VLMQ^&oIQ z-vU#$s`AU2%bdBmdoc#V7NW?dhoBhtb%GyW1Zqy@yD_i;HLK(2WF{RgHR7<9?lEi- zRgJ4S{2m2>EU#wCBjOZHe}`D; zz32qs=jZvgRF7?GFzK#bD!PJKF@sni070mXRWx=A!Pw3_`(?szhDHZDcS81(>hz5o z$?hL$jh5b-kGw1)7EsOt z=!&2PhfAyI5sow%DH-Q?)4%4J2z3S;<&YA$J_)pdF{$!H4dj~y8NATg z4URgR922h9`4`tSv9g1U`2s^PV7jv&*W9it0eMf0OGmntD!};UauBjOlS9LBlv}gV^NU`+cEyB(qIiuMSP+FT zi5SH>k5qcW`!OY@fLL@2QXTJ#{-s5WmEhUE{)RvizAIr;Yqh7aq{E=Y^TRuyt<8`e zCvzRw%~mZEcZ3b($*v*bG^LkA4!_-qXaLfS=^u@M{XaV3s@$+gt2H3Tt;%4sVedF; zRH#Vw9@O#3#I#}U19lvp^g{++&Bc_Fw_mQW>ZV%DZU;|}rl#820krIMXua$i1~#-=B&b z+?*mXe%gDOyHBsXy2Be_m3Vi}M&X(Zy3$BpI@vBSrnoJ7SVWTiz&tIVJKa;Hx*L4N zONha$=Rrqb1;2#A#j7B(g+XrR@c8xDND|{jsZ4b{XEw)S{h5oY`q2wVgq)@&QO2KN z#$Scks}3gj4ukNf8^>vL>wK*N3 z=AoD&Y4fe-@aT}7MLpl&#_UXTEPKwbyf{w5 z`tpd!gOaP9ykIJd9z)%QA%n?dYFkbiI`{2{tIL`4O--SrSR`@(Ge}2x5v+7~P;{@a zcdA244L{#5WvpbplO0$;h|h0#)>=BuiWw0Uke2$@9fc;_pnq zSFpu!QX^|B)7|L1>NGXzq}*UyKT;j?^l?7f2C<^9ZM?Rf^{RTsJoWlc5BLB0u6)_# zrcllPjV9F*Em_B6H%5Y8GL@o*x6J<7KKYBk{P&;c^!+ZPeQ$t|+Dlr^8^)^JV676< z!tRR47@m0JEd$`)t2jQ#ugR3nJ){ 
ze9E7-P_F4R5${(OB8esD;J;S@{-7!dxyCc!fl|8D~df$4>RJT@lr26zGRNImwMY{DM?79P5F7Lr>ndtku3r zYxXGO+AU^V|3Va>F1EY;1*)$9T941=x=;~Q2V0)0a+vYGjuYH zem9_&T~JV{Az=XwU9(_mVUm{H@4;|=x_PuqP3KBaBi!@lRhRF??hDlhu^Yi1qm+vp z-juP>tBwo>#!Z(>hhGw=1tslirz z15i88Xv(Qbo0MsxM~rJ#4||c}qzFuKhET3!3puvhE%bnMZG?-hIwre<+(Y(`0|KJe z;V~&>)!?b#S3qSr1`oA2zVN-C!;xDIn>yzI>knyq20clRyp%)O7RT;9dCElX83`bL z-0xuFx{Q2pfNAaD5yTK=tweMH!AmoBMGQ|iJaI%)E5Kh3YmHn-7+BKDv#a=P&7cRD zgLSex@;jK#tJbXXt78-tzwuq{Psda(*1;}hqEAGtR%&!N z(YRb25eCe+=dUl>`BMn(YIt^P+&Bwg>6vjHO#rXv5D$8wl*OOL@zS%&F)+PEFcqh= z9^`eAKQc+y8_%qAJp~`clFi&v5ipGXMM|1i)o&7N@Z_Rv4KYf^57Xy0xU)f&WtTioj*IEC}@f3~EWZaL-8&O;B zo(^ZwBscw=Hb!hJkpTByaWCdR+e!6T_~bYXA6iHdu_<0en~D(KgZez0#Eq=#I02hD zQq<3}6#|D?N-|S46#@BL%-7ldVb%ft9x|{;Bvx^86&`v&S^N3UMLrq+GRqs_!IKN0 zt_peCQs;9J#A<92mjw2}=ZKXnN|Pz3qBsDQnhKhUYw&1J2rE(jNi~sR3u;GGPAb;@ z#8K}lHBRf;C_Xm38UG_VRA0@hVk^5rZb7~BdANdZw2b8M&wM0(6%CoqVF}KbmCxWE zg);wg*v{0ou7;&DTM|>u2iec0>_n3lByD0}2+-6d^H9Y2&zNz2S3~>@Ew&m5QvNOn zwU=FItn&`H@N8^1c47MW62P=CK>%A!YEoJ_hHaQg?B~J_{=qMx<9PkRkG~X*NaIz_ zSmf+*=eyl)oNIWtE;@uWLN2bTDuuya55roGYsRrn46|HutjzCz$rVO})AmRaXBHCj znr=WUXoLzHTX7+ykamIXmV5?*ZPK=A=`R&8xJtbR*<%mi8Wo|z7bHaoU|l<;=~vRg}B%`Cc>iHL85qgjOb^tpJ#U_^Zj3QcH)*n8$>cS zPBLygB%#aq4h8g(Rkq1aAIxE0G)ajQX|IU$S27JYl1+BzpA(CJ&K;H9o*5x z;CvNX8sTvBt6kS{*7s5Z<6{K#uCBWnFHUa&Xt1}^1Fp0xj_^!;4Uhb6Mr<;tXR9Q&x)hr|+s|H~@Puk>k!w|K?JBNB-Lg(PvZliot=R5O zqpr6*zM9pYpwcj3q^w=!L9@~|R=jn3#`N8XoQ)t7&r(;d0sPIXem``mh4Kk@FGk>) zUE9>HwGEWb@!j-L4dUJmj0*Yp$8yPy+#mY zdNBX4o3Qz429~}L)fpbU`nM%A*upc}69h8abmdh#RBDKt%zMe7^AZu5UbH*Be*M}v zBsShtIG7Tu6>7+#vsV$`5xZTa(fJ)KR!*36tqrkc(`&ph_?o!=Yj(l5+`@{koMi~M z7eMk*`r^{0;#VK>Y0fp(|F>||xr-Z&^vjnIDe-I;f?zalVMW$KM#ccwS*9W*{bv_} zccXZ*B33(U&(xmcL@(8j^8jTmf!X<`x#*#)PNo9#l|Q4KJnrKQS&8$D?TYwBt14e= z*Ed}73-gv2m#Q5j@+8Q1(_H4^2`%A<*23hLljSK9adp2LQ?H*SCnbiCnes;l`%LEK zx_yEcoPw8VVCrd&X?#yzA`8{9@~?GN=f0RC$vqIXFO8MQ23%od|Lz6GE`&~6P4eQh z9L?pG+R)$%3*Q#39#aooyF=;w>?LsTsIP)WvM<3m1&)Rsb zo}OvwR||*_We}YbQg|YR*?6h7(}#dT?>k7`N(Al+J5p3omUY3JAwwll`Nb47qIW>9 zKQumJ`&(Jj2ZTIa7o96zyXE|&WwNj6;!>y(mb}ld!`QK7w`_AQAOPdc#!5WKq9a~?7mrA^VU&lyil`uHV&(UMlUP=RKV*YP&9bdWEhWw0nOYL!r z>~6D?Q`Q7aI-dd#)@*M+KcyNrXg1_X;~`%TWrbS1mFOzB+#=O2031_h7wAvDzw^?4 zx7_bR?p_(R;Sa<)+P1z|z*&8AZ=3RIfvUJ%EjA^sZ5U6-XTq+nY4$B2K0A6CY=PmM zoktIQR+&Egq*s@y_;k`fke+RpoX#)MrD$NncaoU^F&}8BI39^cA*{`ilwD2|Y9LjT zB*cnE0+5GX-HWiWc`*0ro>|t__2jWj{C)_BwdIqabT^(2?R~w-H$#Ta#S9|*M#Bp` zVaIl&xoV}_)rObG@@1y-sS@#0L@+np8$bkJ#*?t(Uzs}wjl_K(VmZi*R_D6M+9TTA zqKnCHDi5nwPE&^wQ}_G04w=SwVGo{BC-SURFrs&iqoX{@C#`J>zgPPP**QsCGO7Ev zXsFE3SG}9t-qCoH^~tYzX?ek0^8=w~mN$@|7}gru#3K1s4`v7*`DJDK{Z|0h04o1*_UO$|wldNg zem%31_^c;uzMkqz$r>8D0Aq;qMBL_2JW<7!Nqq@uR%xURtll{!Hb~MX3wK=XUt%nt zN64;vmrqhsSuA4m8)ZgphU$<=bFmj3_v*d!+lXD;S(o)=!XnZaVaFe(gMS1otEkem ztUwz;v&?`SK(p+?6F|2F0Ra?1w;e$N96+}|fdLUfw^YFa89=vk0Rjp@w}C+d8bG(8 zfdU;sx5B{!BtW<80Rt64w*^827eKcmf&&>qw?D!IA3(Qb0t7@r2i27480-Jp08SVl delta 17745 zcmV(xK*=0XIDS*xgM~-HZ zje1fcf|+aM0PdI2x!r$~$wgNh+-^;Xc(mp}!{$HM0H5IggJ8p914IGup_0T>U4&lP zl^%Z72NEcsMhgfUti`-#V3KOrf+indRCy;S7E)Eyh}7+blPn<9}3mAN-M`P4NtB3Q)D`)TG@dEA`#<4 zeiHPkHGiB`8iJ|*4TSgR8;IZ>_t-C6Bv*M3>j>uryo7r+Q!hmumkgFW7D&I`B%Nsl zbxi)Sg+xGFu(8h5>s{s@FU}_6(CA`udVRyTb4K1K<{P#Eypv>`8w2o%(dtzs2hCCO zW>4eG23y{A^7t8%Ua@9OAu{NCSaWri(afm=Y=6MBBlMmBIQQ1BRR_AoRo4806I5meuIz2Xu< zqFzUJ#P(gDEn#>_Cv!s!(wCeN7B=0xO<8SV)Lh=MVRM|~J00HIfpj49&HTHJTY zkuGo`(PhNxNc&1L+S1CV1m|-z@|Xu5p1i$+6k9`Sv9vekXX~;Q<7;4dWy4eu<7oM$ z&ALYbFM0i$q(eNv4$dYKji(%+bONas-+#rJ5!e2YW!*vMUvpLGqZtx3{qzGxI*Evb zW-M1ri3@SvLiHr9cEed%aNP|$PCdv0TuFR-Aj|OR%n6ci;XQGm-t6KmMtNlU2)r>=DV!g*y6&XsXW#2 
zW)W4$0}^h|-ph&sR>r@J;D_zlgMZ6-9SrCDlN8(xxeW!H9(+fmV4VggvTD`% zmH>yOS(0QP!K(%Y%{vjh&tM!Z$lv#}66|3_*UR*W!SxP#?CMHwDKPu%AAh2hK^f=v zg{8WEOy{WqA95Hfn+fk<(sWSM98@UfHbMjmCEC8EI(FUsAG6v<(N~{BW>O zhS_aBNZ8pwZ7GA4z<&>3TGF#zib?ejq@mEM~UfJNuFMqlpO|U<+$C;vNhTd69 zCl+vGvlAUHWPp<0zqN*+ig7pcNX|BXHmyH>4j!mm{LM(5(17OX?vONAhisIz9eQf8 zz};dh6tDKLM~Op)bkQ=8a5Po<)l;pL&ne=eyxwYh(;F49zZ%KNOYntkD%e@S9{hUS z5W;5$H7^t8`Ux$_{XqQf?O8|kR` zbQ5!>$$J9-k{Rr9OI zvS)f_r+>Glz;m~k%%JkT3M^^_nO_?G2Y!xH751nbzgdP%o=(jQx#bpE*uXgMy(k>R z>{? z%OL#CKDOH&4-)FDmfE@+JI2QgS+QwSpD5|wFn{up6ctKytt4Sdl&{ocUQ4*V6Haun zW4yX%=Fq6IwWLDacOSQiE&kM(S$x#$Tov#&;tP>-N{l1ZYb!K;k9kKhmVJkA2%4ms z8_aB=x-8D>iS!~%Z~;ZFJ`pr-K1E^td4eLTV()pq$v?b4RL?EqPEpf1Pe;~o7=}A5 z*MF;ezE{;e8y6fy;%W$LOp0+*fH|`ot`U$xucWy~Ifl|u(%U{q|Dor>+4rf5rAV`T z4J-QTRx5WZVPRLyjc~!%v}SeMepn9K@h-RdSs4#kRmE{I+sgA6jDtdx{4+{Kr?9fT z>f%}GtC#laAQ5qLM(nG<`%OEl~Jz33mGQQ#%k6fuDCZ4!wgbnw!& z1V$o+pyKrvqorl{cHek4(+L}k94%GIOF_uQ=yuYQnYLdjTRnTg05~Af`pP?34WDvddMx#~U!|YhZV2?QR-geEZ(oC{n5GBu z!e^|L-1PV_R#s_lH+zL0ETrYsgnzk)s*0*?Yh22Cr13O)P*Guaqt8@KR%}luHUSx- zr3s?;1kb5!Mz?_pD$3tmzdo=wnr?oJzWL^HHI=TxLb@dP-;*2!JgD2&TA6jUs{00L zf>G+qmQp?3A7nUDtc7G6Oop9t|6Hcv&wwPbV;GA`FLyE9DLjL0G6h-iO@H46gjg%h z)k_VBM~yT|@Ojn7dXeFSm2QjF&9LV@^Q%ZzuN$i-@|`IN7Qf7;TIcfPZVM*%u6xN9 zUrqa&NV*9vdWU#Ykt-ERH}wyX*dtwyCyEQ(pQ2w2ZWZafY%)c!1r#yRI=)DD5%*1- zlovC_1%!|@S#~}ejX~C3p?{upi<@2D=2qZO<0aKi6{77D+i_~#p{-OlT)nIwo9H&r z6^EM9H9T}>Z6hsle2#w-W4&}rpA+Py)XaKOWVlFt=!Se=-L0t>aVqPnbm7|inI{Qj zUs*mI_v4_)GGrx%<8D^TRV1b+xRNHP8oW-hE8Caa4uz{LzU(X}qJMhn$wx6=v^6uI zZn&|m4^}gY>2`*gCM#DtteR*HVrddLjMQTsajsP%pymjbf@1sHqCzil;2l#V+0E&) zOaPfI!5b6YxG=Mm9a&flLc?opmCQt|iB&^d&gLezdl4T1q>4AiQWS*A&Ys4v`JspO z3pe{CitY#L(-vY=;(wR|p|n!Gn+Tzgh4}7@fJ9k_OFxz_X8862)%329tx;j#)Al`vXZ_^wXjG^TM2zj&4;zAz4IjK^aD^Q#4-OP|Fq|* zq~wT6NwZJA()vH=evVTVIyt zm^Q`K_T+yTT#Bb$0{T9OH?JG-|4CpE^K|Ak&me&0jT`*sbL){ns9`(4$(`|KbLSAe z_s{yzL{W5Fb<&ID8`w*NB)!4u|_PU^mUR@EnIf1(VqIOlgDGbf4a4 zpe-u8t&0_lyf;JMIAvkv_?Xx&Ni5OkBHcpgIdiAGkL@(qM7uK!QsUn1uDiwjRyei& za`%ST|0@cg3a5qhXXSn@r37`zzp6@coK)Z1-~t`ovVR^Cpcb2$#kwj3|1Ot)66%ct zgrgubJu;X0dji6E$;A5B1Hp_J!G=C>j_-V^ZHR<7H!dpu=+pb!JM5ANNQ7S|8LK8* zhQ$%pETpxK(yU&Dv`Rto0d6hRrQiJJ@u-L+F&--(yygHd&W%C))(=lka6vyx;H5K{ zue7P3ynnR29=r>z4clr~%I(ZP@q5q_F5Hf*Bl&xTaND-g1guOFw-ki^3c$sSvAVZx zZdlAlO0WCm+q=7v@UO&(g^9*wqKB}jjc*ImA|mYmSJd5^wbnU)LaC#ri=}6E=MUAn ze7n^nERU2b8fJp)xJp$v3F_l9cMFueXL_k>Q-3Y(RgHzM?$LW7s~G)y4M)m<{Rl?BVZRA3t4c?is( z>Rp>)-y-88cg}@{+K%+W&46X~%)Pgc@tY<3vYJTF@G>o@!uak+ybVnDoyXo7lzp*o zlS-pe9+`Mm>TL7z-;^s9kc87W-QWDj|9?IJsGM{+9Z4L=+9U`k#7W6P>*kZ8oZzIC zBPZtdfb#qJ*~uCWEjp)e2fRrO?8nY(jKDp9VI7tEs6}QsD{V(JsLA8rt9x?Gsd5Z! z?_t?L?FIDI-QuoU(Eg?68s7Mo4064Uy4doaA5FSOL!o?s5N)TZm<3kV$R=4G(|=^n zABbBXMhmw`fB;E)QhTvEVT2E1>qJLC?1^EWq3bln|S>>V3!D74!q3lD6>#j+aB4(#_V~~aGkbgEp!lrIE zkMZ=`xcfi%C4r|^T-kfz8^b$!6r&%ZjZtQrc937`O-GTcMamBAK{T&m%pxI=FG>bz zBav;^dtGUHB`Ztr@xKm5T1%qm;v%&0)dF$y1e>g&sK2s_Pf=j|7fC3!U{!|n81_2Z z*o>q>vaPg0N0$dD1rEYSWq+2^ljmeb)#xQ=g!P+mc_L-(jWeI+gzZTv(V^+Y={*U! zL`o@RlwcDx*@4Q%Y}CBG9k&hzio>q1J%j4arP)+)#mI(0il5WpHk9Df8Mq;)-VGa`ly|P-(kBu4 zt}H!kcI+)W#}8$}&U6)?vZmEdd7zz8gfNTTAV}lgU3^n`A+|a8gM{R?qO;F#%0{>Q zs*I>h)|5Qi-#d~xS3U2a+nQXvDdShoUw>oNP*Kmx6h@Oe8mPG%T%F6W=slb2iLAM3 z$~g&)QU69i!1?hm|9{_DbBD?+fzGEk4SzqjmQ=m>SaMYT^f`KGNDC2|!|zF@iS|4C z>F|f`KZn>yr@U$6?;>lbze!208x8XRXa~229^_aO8Oo_l8Dp&U@AGqQ&ZyJbPR7Bk zDgFMfHzSB&4f|$uB?eKt735sd)>vE=;_5K{^Hwmvo#s<>i+{e?M)1q4?EEMK_Okb! 
zFRIcibp&y;3i**LjLQg=e4K3tLy1^`(raJUG9~KX;z1|a=TOeMJ1Lteu(>+4q7c`c zrTr*>c$uyCC6W^QlIJobzxjvBGLGAi8${kkX^cSrRZ`W&92z1MpR#OS-RI9YnsOq6QIb*_SSjgIw~0kNtV$wm=_J7dLh+BbBIGJP}(<_UOz;aGo zAS6)v(~0rOvp#nK05)jQO{8Go5Ba{U? z#IL&(+g5Wq$R7Y&fLz(zfX5KF$I|Cphudi9h^UPW zo_`^O_tn9FR+wspi)ELVSP$V+uQ8WJ=YIQn^-{F)G|{s-K%Pc3wdK@MO~g+gxMe>` z5Xb6LU~aF?iH!IwE~dU?8%$yMKq0Po6y-TIA`4!^BtK+c^7@^Ow@{3U|2*@>!heY@ z3ebrP$TS-L1mb23!>5I-SY9}|t1n0TY=22DJX*|=?dlIpF{!bXypqMlM64xqj9qqm z`8S8K%xjD(@W@F%2?QHyXkoSzc>i! zP#0GyxTk+hb&S&=S>t8@k}qU=U5t9xVr5~nA>UxD(UBTv_`y`#J|(P4u|szp^4IgVSoF>8c%iW4w{vLMaiH%r*lQ?{F8ZxN^YIi?u&)= zS*}Z2Pks*Dq3!?nrcifIVpURkI%@rd&2}Y>`p*ip&AvivKLF(m*<@=j8LP?Byv44G z6mDiX`i!I;;=LB^f=%pg(s9K6y_QtC0MDVW1oy1+=oU-VW|@sq-DA8lepM4aj9sf9I-Z^vQz+9R$>ZTqp)++cXIUO|{MrIBQ+&BVkt$ zkZ@kJIjGeo@Y#Wkv@YWHn%xB@i2{`VECg@7r{M4&@!s5cY?pCGmZ_9Ul}i7ahVR=F zMn&J zdy#i~qN!FKT3xWyNWRzsHU2X%-1v(DRXBNaRFcG#5}M3fb&2%2T2FeHR4-sFwT<=Z z)7OI0FCKyCFW#}Le$C5$MUaWb%-bdG&D2f;$Ll9t{-aVE4EjWBu74)aL^3?Uk)@Rk zW>9cu;JM=I70t<>%y)}$wnI1#&(t~4bkKN~tP$oqj%P@m3=L!e?M5o%Rc zbMoo#MC>ejgnr~TS-fj`GVI-7g-ZHw_*;bIf&9!UMCz@=`(zk()lfQQTvc%^f_xi; zeaI_8UcL+M6ofDx&42WdWKA&slD!UWYC3(4#4=3dnO9$_hC-xuMcJ%11J;=M58mcT;3rAZD9 zqBL^!WS%0^g^!m{+(a?k;;g(NE@v3J?aDchmEa2BP zWx>HEW*xiIc}LGDA{q|d71ADD!-TYkaot3~Zz#NKDeQS$Q&LbW&(-Q#SM|x6wUaA> zj?v}2VfRtpd{SVp4jLzZbLH<<^7Zw zono}=)7NaeN&y5$mbCo|+kY7&Ejyz)ppAIidbZz`>k$URIwI^3ShTCj25YG}9!8qL z=UFIm?hjW4SMM{vMO!Bd2giDUPkH*iY0}q~RLxZb5&1cn}yV(+!4^9TX$upF`4_e-ay6M?J`gf!^vWqyWM*eLI+)IUbwid8Gxq&m^P6 zTo|s^VKe-Py&35PrJ@8u zWM3BTGzpa5>9E6h9c(t$Ou4uW`UwB!Qh!@$rH>x+$#Aa(L6eVC_8EfmKL9oP2Kxc% zw>mz5i{!F_!k$AHE@Zd|4~7-Lxk509#3T>okzh>GJs$l3)~sZ=;xRU>(yoV~rZr7< zRXgs-p7xOOpR$&m4Cajd_K@qKV>$QyYt)$k${s+#;PC9sa7O-*;?4ey&wn}3?tdDx z*ny(}ag47%Qlb4lj};ImJwm2dNaz~?p8!p_SJzJ>sSa3>TDL|7db{3v@OX%b-`MbA z&4|CquB2J4GGoZe*T+s$=8z0>E`NpZ;keD@I;3z|=d1Z8VX=mZHNKq9{csg4cEUiv zVKM1Gl-G`o<-JYiRMSFMu@QvD=cfGo8*}U2VeCz*ZxZA={nJHknX5VfOq`B`d(d4_ z`EZK1%_0wCgEXS0)huiQSv(0m00pt)l=%6}JRSE=pZlJvJhLPXV~8_Fs(+I;SL$cG z>uolDY401#qYD8nq54J6UIm#agksc2SdB_l5>DApcm4)L8b)j5ggJdPBW+~c>a37h+xzp}L=2-!@G)_9& zkClbT&$)4!b?WZZy&xn<#{U;nT48eRjr2EysFJ*wZABI8n)MW=Ptp+->y~CpBmmd& zERAb4>}AoqyE*FYsZl0$fyS?N)YhUrM|3;dPuldsBLGL5a^y#}6@@ejb_ z2jCSHAn?W?@{co5rGK)wG`Ez~bMz370swSixD-Ium8PKdMo^-v4bAg?Iu^{jsYQ`z z>&g{hbvl-?{_ls&|>s4(rR_GvDh!0DFF? 
zxHEsBlK0>E5(G2G$9yzeJdgjX#jkTNExEel!3IZ@*INfAEq`y9d;l=#XF7rWEAKx9V71k02rJ-OTKZvM+DHkiS!R9KzC65 zx*Msj9wvt%h9|!bN7V)7&*CBE>HLLSjD=tvP%Wd~u*!00N8k^PL0tvv8+ zCi7a}JQrM4%%{?$o52l>GOlkQ00-!OaqmIM)gt)}B$zz7kAK2T{j|L7Aw4OgveK41?47*D zTEs7Q=x`oGH*MU~?xDU-nHMy6CL0B{7aaMi#&(_Va&3*TpB-j`P$sn2&a&RvH7z?5 z4gIse_wL|(2xie&uwVrQBA*O*KP-)x*pY42KZ(~$xwgNP?Xa>@NQ%b zHOcU;BLu#QfT>wa{C?E8-uRQBP3xKB;@6zk_;5fG*aaxU5mK9)^=O0bX5IW@u90-N z%GLGNd8fkd9+YT@GM)}QCPtjep0jm83r?6&rY5S4*ZG-OJZEF;KVmMhkx8_Pr?Igw zv44KtItLEh!Z^~XNG0~Z)sZumcYH`^+-J^X*lADXyqkxoxrk!&T=BSa?a_!7_5tlq z9ZY`|wSDHglR-`A+&Z1`2IQ<{kd_sT&1^D?vbb!zS`q%<)$o#>dy(b)5{SWGr-%=w zf-G(Oui~~gq3rZ85dN;?7T|O~-G-fx6@Tn-@zR~BU2e2puNaaUORb2AbJ4{n>m}c* z-#1Ny9(xYbpc%DstCF6%4ytT^vW^b(xJ6|*B@f_?P*b~@q#cU)Vue0juu>(BQ39Xr z&a0+xv5KS6Zv;WhI@^F{?!%ct|GQ{E&z!w-YI&=Mi+u{jufFx!tY*AMIh`zA4SzEI z05y)_vUC3{MV{AN!RM^xH;}o3bwfH5c7o`oTR`HxxjGB!oB(!7!KFyovXH=8oDW1w zam;QJuqk(s_zqL6^U}A6Nf6+-Fx`6}*TzQpDDP?5E`PY=da5C1# zZ<0e`B-hu%tc9VcOF$XHq78jb{(qH@UdSzfv-DCTGc`Z_UFl2IqtDL4dyf*6cryEJ z6%Elk#4^2OM&K$y?xD~oPY^~VHh%+G4jxT35gFd^^FMwT9+6EjZTwZvi?#Z=3*;;3 zvN{zG7Trt-VnAIRmw5fFNnV8F{6H56YaC*Xoo)o6q1VSbV&8ISg{C~^_J3UDIZ1e&lE0=g0A1_LgE+^xzk(X(|odk52 zlK80Nfom}Z4iphf(*Bfq**yARon&mkD8|0dT|)n98EF}l{}z1X9@PQ?;xhrI$ONll zSLa$hX6VTvN{XR;{o~4H41dc?w!XNCiJYC+TpQbAOEm)gM9p#vucQ5U!BCku=D9O zHIE>Y9w;^6MF{Ot>8E8*u&f2)_LIZx+`+#`)_3GD#-kctKh&1t;voL+zW-xuo)!F;37>(m z`uomVN%Wh_%Sefc}Dls7}b#0=CP}a1(lVx);hIM3r`AQ9-t!!vC4Ug7{N%-9e!KL(+2t{c zB}LA(RI<8tAs^n^D6cVLE6r8`3pmKrsjjg7v^s2N?ocei2K{AJ8ZoY;3-fp3vFvxi zc6rWGFeBri6;gK=KDm95VeP~(c361WDbkrruz=SrYN!)BMnjL3kwq$vijR;5guh*) zuDP9js7Wxi_i}&fbyRZo(8_y$)yyBBt}idPC;Q5X?(sGWLwoMy)29rK z<5T?e?>qsw@j>fmil|C(ZT?PFDb-U^61Yjv^RCZW|M&64q>JT?>sq&&1bTX(5^(|m zq5}zdtFJpZ653KgvnOk>e$Z{DR{H>u**`lvUFltArhk*m%O!jzIA|_NWIffRO{bGi z@~2myl~=AJ(^fm)xb9M&&2t)21>x5WZ35q?j7V>GT$V12wP#njI!k+*yWO%cWDNuJ z6u<5gVl=muOeJAaZR%;co1TqQq8ddG4;p!-)QT7}h4aubPZsjT89pJ*5Mfu1hzMBo z`z2Fy(|?NE!MiAoaNLq?bhS!OpCm-lb6;90SgFU-MIT3gsa^xfx3)^+vyEV#`Nlgc z*~E%3PH*y$s9%}XJ4#jgn5;G~Kr%Iw~XKlw^KZVilrz1Jcl?r@RlK_eXVk+(Th!%m`V$t{1P7j%C$ z2-CS&hyUjY7JrV_yCihET(lpwlnE5}IsdpiuoM7RQ;s-J0GEe?4d6pD%FUG6{(td5 z9e*SIVu@J=)o~}6Hysz-M>90B>`YPTWd6^BEiK2VfExXXJ_!* zr}$~oECRyYXo1@Vb4^R_gC{a-f0+=h>44vhJeY*;k(9lTk|VifPbx|iUwX~+1`D=3f}LsiLo24#8a=p-1XbP^k$b( z(jgl6m)fH#5uqUh0##Y7&G`(A=@TgH1@eFs@*{Y-_#VyO3pg&FQSs(4xD>i&3$Vn44bFDoJrJ zkr!dm&Ne=b3~M71zwUYDZhvGc{f|(wWT$HS5`Ua|dyd=q%Ih8UgrRIWdeB2{D(Vin zbTeg2%rrrmO@``f9!(|d-7oHRS?S=^&-S{Y$}6iUIL91$xRV-RK9v@f{zB4vr)jTL z$>%T61z&%oSy^}C3EVF-ifS8G)?iINflscI+xA}L(Gtd)UeDD&Hh<#kBqJ!k-Tnox zWV~zzBMH=KP0$hi8BQ{8tDsB1Dq5o7ln8WquH-} z3qh^u@;2&q1+W~oKT_spCZ+6ucaa+2UmoG^(g5&StNPib9EoQc{N0ACcd0t_f?P#9aCC=Bq$;&$R!{lR05q5R7#8MJv zi5e6{$-dS$jT|!16sdIqR+EVM7=7n4jR%~1Q;rQ5OA1I0pDlbL;W(=+7CTk)As9`q zb!(H^!V>2WhbuPRjlS*yIW4Q|=L#f7HmeaqeeJY>iWK+Au762l1+i)nz>zX1sqdR? 
z=dvJlN#xWAT%c>4YUc;0V$DS!H{~7n^bTJOHUGRz+{|9q_rhVd>4NY$*)FJ~%u;^gW>8!{I`EIS6pux;S~^2Aqltk{o%`m0)=z9utj zYpN23t@ey#fqy7#UBqAy$N{8z-O~oVr#R+8HuUoyz4OMhn5t)I<2-kSlhen8-&KXa zhnUCdfOn4TVB!)eNNX9JV87tezoSsF7P#EsT>D1nlvs2}T`J z`w|f+Z}KO^T=z}eA1^=8x3y+`N1ah;^;*FtXbnA(<$nnfh|Ew;Z7Uy)<+OKLF63%p zc$D)XXeXgY*QB28_L;_T`IFiB``uCu-XYS1@q2*GH~?2`c&JxCF(`CuM9X0|&WV|v zqJwm)wdvg)!choW6)e^4vO9?J(-67el1|RpP z>NXKcF@MiSEWA70hxWAaT#ZHUe@-QC(IC6uR__dC4VS|k zuNBAS6^wZntciQ};eKY^X19W3kDzN)ZxND8qNl9C_D-HD2@e$vMZi1DJg|SBGcfSco)>~t(q+OuWk)@_zJMy2~(PDy+x&+ z`kfx1KXLDDhvYb#X}fH5#CJxs(%2qgQ|1Go-Nmb zX!puP$p(EBBvGNl(FYKR6JwLc_0L#ww0}}h8ML)m(}rHYxjrh}s;&E-+&LN=suxF) z^1GTI-ncM%k}~14Bc2PH_IIZbK;z197I0KZNCJjn31R!{9&*9RCmhDcdD>7qWEI_|O+O0$`g0TMQ}+SL*o;VKXn(zFT8F5%PaVb&w`Q!t`d$cXu3=)ya^bNW{ZeK1 zHJfuSnWO5wwqVO}%&?T%PD^-nNY0Y3&mSW;Mp@>7ZOe*;6fGDs3Z#!tAEm6>ALGR)M+JMp_;!^?ep}ozuE+{Ag^z}w_o(Bc*eZ+ z`OJ(A{Q9YM-Rvq~!&X3@>VT50ZN48P&L)vc-pW&McWRgX-B0f4Z!@|9XOaF7z!$A$ z&6Z6gm0itJlcJu==NRsIqaA(V!@FCMw&)el9mjId2*XMKNgliQSATrTIGRl^s^q}G zn^yNShWx4czamOfbd7>Mpa`*H;#?m9w52=erA;z^ncj;D{>;F6Nj0RmvwGipqw;sM zvaUil(=~Z7bBgRBzQF+755VlqD+6eQe&jjhxyHh_)N<0KJj-8tjR?l9o_cfpV$yY+a>sMUn*k*$f3HZXQ5hl(!LlO~5P@{OTpNYy3d#*Iw z@vqj%^2J;9UV)k;>1TDvw#UbQE&D{Ep7}Qg$VK~_bt$q2I&W%EQ?iDprfy$LEpwcQl3<3QqA8fLdeEMF&z!rDRI5A* zHxn;A?7YykMBVdfF_?c91CHlPiUMby?pXu8rP1}6PD6PCCMQQd?#)SV;^q^eoMqn# z=pTOZs$;3~Nq<_mLlV|s797zFdgL%l@KI%Zm;B+RU<`G+ox)ILfRQ<7*5<6F_i!_4 zQ^G4ogl(?T_fvVy6C+WCbrqSrV|v0?LWud&%vyucP=B3>0C9K{QHmiCE+#q1m_^Du zj@x~-=-0ntRWxjJ%0QxNZY{906H*VI>-}koYxE)!iMnBOoZ29lAQR81U}NJz6XsFf zo8D(c_2h4msW7^I|2j4S0l~(EMaa52b1S2i>|rmO%gf!fZE8AadK$rj509E$7gm3$ z7KqId=6@8aLd4*%oS9B-B4vZg%&&%m~Y3|P`dZG2-4m7>e#e-5x&|x>oAtEmP0h;j#M6h5ywNv zDofA!7QtAO%5s#~P4>(v(O@*Y#`zL_6n{%P`#_0LKmHFPX+cG=S+LQAleR6yFcr`L zAR*9|O!PhV$7QVuCsvX0^5rLw!a3>mh82M)C1PU{n&Wk4XAMB0j}3T$=N z{^@>YdHX`^m9J}}*VmtZDLD-G1IrfsZ5k&Ro1~!xmQnXi10R5LHPg1V%zY=l3x9`m z6kg*AUrsN0E!9U_>>=aa^a3rk*i=G(u7{F7^h4IOnjf&qapvAM;6Nf1+=zB%LE0y^ z1r+f+8I=iqRxyOA-{Y(J_OTSCCMe4Ma&_qMbB7}=gL=KB?vWTJCDpj71Ed|7dsq3S zcq_~wfF}=5Jlbl+6$|Zefndw=C4U@Z=o7C4My?2Trl_*QAW(8Ta5k>dy(J;6ROL72 zWP%N-14SvRMCU7egNNh_*u|wW`;VO4_k<;(x#M5%iSR zrME}KIa*i0?a7z>RY3QqZ*|lylvtA(V?N7#BVi+)sw8d~{f>{KDv^gI%71skgyXXo z;#Xw8(=?d!ZzZs!{5E5QXQY*TbGNA*-LDTHs&x$l*q~F9(7@2|LWN_$6>ahjeFvS! 
z>jiw-k~buYS21OlwZ&QJakF-+<=(mK6v_y>x}mHR>V6v0Y+5&pZKj{&jALQ?v?W^< z9h7!J0zbQ$kk@<%Qbr+=SAXA$3mF4<@b9+f)AR3=v`0&QuYAK%>?_P3e=2BFfCQu2 z>Q=`f3BhXt1(O_FuxRROza+Q9bljRwr9xq9%uiSh^DX@Di~8}-z!VNWMnA&b z#5s7zo^5bWrrNHCP{K!&HkLuHx`(n-5_pM|<7nh!qL=7!OR7hSV&O3&-#mX`JQyzw z{K(mhTLEnnO4m9{yK2IG2|dlp8S-vBjOC}neqUp+k>o2XE;K_arl|Z5qQqj=&_d(o z%((zl2{~+dqDE)TM1Nh)%4q24K=F^QMyLrV0&YeU>uLvS)v6Ji(2hsHN+ej}2iX5p zxliv^@KEXGiY5xmSC*j`3OBpicL`^~l;ocn$6s)9*++YG`~X0Ly%e8tq*SnlX5%Br zOG^;sL2YZ^wp#DZ(T^gMHFlE~=VTR*>HRv%D2i1@be=7O{C_g4V8+X&Z4L*%jow3~ zBQg+}pNd>U6$cpueXX3b;uBy64Ex0JbVlvEILA5<*&EWc{xf8IQrhjLXzuzY66V{HHH+tR7d-|&#G*#h9fIUp_Co19nVVXjw&>AgP#MS2F zlr)srwK<~|I=raWboVFLvU(Df8yAX|w2IxSSG&hccFr#te|nR#;z#0I=%_XpST-Dn z4!2UgKp#c%9kT11do*`}GC4lm?ka&?+W}D_|9w|3nSYVos4j5?KWcx6nN=k!TgjD8 zC1+tQ(Sgkty-~@VF^izQqPZ5#RFw1jVW7l61h-}O-f#_fJD|CGYUU;pj+CG4?3^6eH z|EGasL%mmL{!e4LvBH*ns;^WYVuWus4hsM!41fN)h2{C^;p#5NLbBDrW82(r6N_1i z3rigec!X=p-|IFuo$-qDR+g4)93t|>N%zy77hnl2V20PjWLJ{qDBy8)3XG^W&XSW7 zL&r_{B7?oBa&le2LJH59siA6VO=-L@-NK7C(25`Rl$So}!pXg0lL zv44x9QvhmdLLLBjW^IS4sW%Bc#(!sDZoW%(&$asntT0lXmmb9L~VqffzgnmIi?6N)i=fGeY}yQ+d$udqxBUE2 zj%`ZJh=O@RA!1!>?GBJ#%BLrt@8)h z`mnFzewiJ1vF&|Ua>}|uY1fPY(SN$l-M5!igGP*1_WE7wvTrPh0d`bB_4 z%G@H|xz|q~+MgDOy@)-lL)Lr&*eAPIkMh`SFK+GA-mMU2=bNSGr1edsnfOfTtrhj5 z#nU$jcl{mM7N@b}-ZjQ=-sv@^%HCZxPb3%HrRNKav?=Q7uwA6aK=db?X@B--!Z9#Q z3piz;gP00PnIr+WWEKbHA=dQ4Ep8vpKf7g?cXz+I?-OATW3#k=^_A+uwWfJ&5H2vF z-(Jcfv}-cBvK4aZAe^sJtXpe*YpPgbte7qpD}(2P`2YyRN_!AgegH6IP>9_Yz!pP1 zDD^G}EWILqtvcvzCUVd^rGGRvC=u0wx680eY&X`>1yxh}o~6p1^_)fPQg_UyUws10 zvKTAg>tx>Cu`l7ILudc?H2dv~H^0<*y*mHor&EijA4_NI)zMdw72Kk`X=dc3cD--= zW=`)}w{usf&KmRLiSMB~z&FA;|m?$0jQvUl^j!)VaIZ`!RN%4^J~uyHwTytqyGXUJw% zhtt~T{_W3FEAo<#%~H3HdOmyMqIK(zXO-#sCTTtvsGOzo{F;ZuuYy~}d*yPsteak# z{%>;Cya%^ATr(pCY`ja4{`t=&?eJv(=Ccmc4y+7d=8FapQ`q6n{>{t>g@H^Fhu8bJ zi!m~a0NFAQkN0oaa%7YNvIQO9?ceU!$S4S8OFBH;zdigQqa={cFfBRKNW>Fwp+~LLk?XMh} z#er>SZD#&w)PBV>RWWG)xjxI;j5>yZuS1R5M4@9XI79~_|%GH^A}Fc1bAh}%6f z$lCPc2|BHJ!bZQt1_EvRx=Rh}6WJUTT9j@~4sf}u6AzpkzwM{Sj|Q29b zBqIaEG9V7{W@Hj&MjQizoI>~+cwn*Tu%r>h#5fd$1AZt7vO%B(1x*t5;4~qKV$uO% z>cbL=9Pso7F$ob4K5+LyL)ga`}@CD zZ*A@E?&-_!{@k>jI?r>Ww%+cpzXkp+{96V|fe`*5<9{IkHwfVW*z~U(golpMi>Qx; zfCoavLqNhq_}2%b0D%yZ{%0NN{|{soBvdp+1a#1Uv!ZW7h=_;?$OuUP6UYb%Zx9fX z@R0FoKq&8MIZ?SJ2*4U>l3xh9HR;UVh@`&3(0R1n^Cs!};kBFp`T!6G0_gvP|8G3v ze-soDD%zX>=#qE{AjJRBh$#OvkP$$b2#9zzNcfx}2@TpW$nUt^zDdHsn&x@6?mYzD zA(MYm;F}jx`C4@ry{H~jgo&5`mOxlY|1t28@Id0A$L{??R)XO>6m?e2I|_UBJ)qnA zp&dg`BtkG^CvJi{6eT z4DuUvu8Qx_7Ar4QZMz zg9_r51YqHTNXpv8PoMUpiFM38#|0$dmC-(74qqk$u7^grnehA$wH7K7AZ7ai=F-Y+ z%D4NT$w?bIR@PkZIDQ7TTOd3f{E4Y*rVPP(aQNZCtEg0%?|_8cG8OV@(VFbOgJV0W zZ6`k!mTGg|nW86)uR6!neHEGAC6l^26e4Iu;zi3zU;coBRBDfm+mg{Xf6;UNJ8fWL z&PS^!N{?{#V;dPtRB_^${z@06>l+DW`qnAd2zC zQm87${?ujIm7SZ!K$J%*&8BHc0jS{*s67i=hvlR)31v1(teDKHBG@zEc3;I)^2VYy zehCt!atm!&4H(_y!CnF+sA5Dc<6UO?%rfoht?e)F3BQtvUd_I{7e(9ruK#S*`PH%~ zJNt6-*}D3*fL*l(7kDkZh#btp{s+Q-##OuU`K{GhScpE7F(CMxrTpqHDy!yb2xIE) zpvKRH#am+}2gRm0n2khKg!7#D-xVL+?*WaYu{`t;5qqK$C2d@P)QiOjvP7RjRawIu zEb~jwpnst8&xA97caq7?CR7qAO+HGvkvr{BECgVXMfN|32m#n^YAK_drtAoMBx-JY zI-Euw)3{%wAY=Amf~`udeZjMb3u6h`sxg@9LtcVNi~!cnvkugc#*%h;V_nqK>Ox59 zk(Xoui!Yf?zUT9Ab(oW;nJP|6N*SpWG?7->@C5c&vw$7BmU}(1?BXd*K8{Am#x}h3 zkvD9iYgmUY0Z@T^RiIT(-KVO8No0GE4IaY(fvWjRm;%51f3~lOv2p3>k#e4|DQ1hW z3iwdDk7N_B4#j=4qiAc2e~Ux%QA$xs0GaBQ^;)h^&Vc{pG>qt00qobdGB6U5)^;H1 zi)=j#G^0HKm^6raC2(XNV6M zr=n@|Fg)){@eYNM%P%|`?hl&GPVMc#!0c`!1ufXihVJ+l(_1EFPTQO`u)V1o$uX4#tkd1QLo-3VVfNt35hqK1& zH0F#A5T+t9)D3S} z+a08y6ZY`>GveeFir%dhOYCtjz4nTDzEZ(XK8`gxeEMj+kLEgT&Mo!LN;R&`tk1Pl 
z<$YKduA)b;8j~Dnk)z`>GmaXI%0en$MQQDz|G3}@%czU)$w8!d1Se6pl_a0!C{ise zKo+B5ImyS~LNC#VMCrt*;QlRqwe}}q`jXQuM>&xTDZrywt8iI`t#XcB%#pt+Jqo@S zJX#!ym+Y)lUjB6#mlG=Upz<1sa}RJ_kI>|D%LBy+ubg_#H;xO+zWZXSG|}m0X@#@7X*RYpaR1$jujb4$S7-b6Fna!p-;m@1m8weU)D{YuBsPgw)gp8{o6Dq!_ezfvZ|TIOC1hR+1;c8!bzy`=VurROi@ULImR_7C zEb!}4+-1dABXCm7lC)DJ9j6Ve0#fXg{Fka4 z-qPK+&jICz;22Vq*^`BJm;831JcSbaG z9JaL2DrwCMv*U0W-;~~|%&G;XauVmeg( zoVktO!(4CC5cJfz?Zb*tuC`;8WtW_y%vx!%J1`3Lr{gm55ovSv z(~24xr_O$3gM2{l-F#20$j?4kVEi<_K4_9;@V+2uA9>LtL$-G9A85HEW{=&lyvlj9 zHhM6Z`B zgsPdvZ^m^w0AKC7^1=#m4&J$ve$HDFU=dN!%(~Ftt~Hxo#Wjg)WSAiiBPMaEmE(ub zc`+Pua+&MzL=Mt_hMhD>=q_<#veah?{dwRudY4l5wLAkQ%;G#aMjSAIWE{L9i}LlJ zx*hWfCB2xs?Wt3Cm;D3}3GOb_=tc_e9o`x&ZKMimS}3&hOZf+YRQ!f;j5a1Ztmc_? zU~X?KD#gD#WEsyxA+2s3c=rnl<_RH1^0fWPCTE)QV_92NWfPdTdXzN1F~oLx#rX!ysE;Ghb)l>ZB2s zGv}Yy9kCNJ=NSQ>OX{?+3LOaisQlnr#C!7?`}`khmNom=i}vHY1KE?sxaHT9P(C~1 z1p}*an}{$HmYPB*JT2BK`D6K!x4ajsIlW!@3eWOOXOzkKIcVkQ;bUg z&)hmwgAC7ZovvxiqBBR;AWMtyKad)Qh9FMA3~of1PTLGB&9?$em`&)+Rdkz%EUy)( zvPemp1J$?$6P|{5k_+D9*dWUB|v3EUoq~na@s4A(6ee`$5eQWIqSlT&D}#QTfKxd68?eEwg2C>ijQKwhkF3d2=qH%CkP(C6cbm zLy7#Zu#UKZlTKmEtj2k~yMn@Vu5{5}>7vj>(zsapyWYt~2gW+3}_)V%c{NOj2rvfbgCZufDf)ZU2eu6N!)f9=MILHjmF zD{Yc%L=oMRzMH8o9{ldFN$*SUQSd5nFu=Vxq7kH#@V7h3#>i}0a=t=vLXDRwzgjBi7WO^*1dv{VDf zNtROu^wXMo@BFnjoexGhJq~wk2$DFro7pG*-;q=9*)e1=)jl&PUA~0{V-stVptW?n z7Dpn;cS;|kKNhh)y*n_+6YTq%C+;#~SXELebsykG2g3}vbRyzn2dB2Io^230Zz6l} zxN*U1FF%siKDQ@yKL?GQ^7bGHe*k9hs~8&;-R)mZhu7(Z`P0^yphjLq#c{rYFi@sZ2sRe3(IJ>9}gGZ8yb|1Q2`;hEDi8jH+-<-dKgfd^Pjuf`EBDPvM->-?y4&6K&6!z=yr~rfC`J?0->rt5r zGWohon+Ph!7-J9Vipq2t324{9u2>fYn24S|CTcQC;klcZw|8bU_Uq)>AI&ecuYE34 zWJfk2O;=(O#)>i<(2YQ8*LLrf#15%bA~8G~7m!8KYK3T;z;WYN%xYLzt3<84e)Ua5 z7G!nFrM%YMV&p>qtG_<)mlZ}>UZl_}I}ZHxLqV`7NwcB;PB-X2B9+*MgH!1FT}80s z%Ib|J`%5jp$j=}h6CquQ++?%n8_gx-hC73=E8eGn(rfF-LDBQp($HTRSiVrL6Wzr1 z1$~!q|7CPCH~f8nHw6Q>lzYsLiLxU0r|ZvsMPN^>phSl(iyYvrU|+zgg`|eU`qL+D zjH7arP#ULi9r5LHi&<3TfyVl>LSZ)ect+0nnl*ylJv#lw%Y~}Km`U!JXF9@f@CrwD zew|MH#8Sqxo{&jkusISX)lVI~A&RtL!1rad$S$1*qkdn1auK)0eR}XWz*b0sFK5jT zd0Q3)^*S!4~$$E+IZdfk84>69A+ zdF`oZl|d{tm~=J+lw1QW7&_MMWz`Y!KN?EKleRKtbnkSD;;y&@YG}#c%Q+O{AZL~8 zE)Pbnp-zXK?P8Z_%lY(muQt3|R4~zio46}} zcs6SHR78Hvi-oA$r3Jx2AH$qIm`CMm2%IQZU8;0c2wkm>rmkVZHxSWZH!GO_z?Ecy z*W*rC!rR2Fad#K==-&$Wiy5SsM07MrkAn9{A_FKi{olV){FSJdbIBjtPdlye*f77_ zZ*n|zPpJsbO{Ozdw3v34Bl#RuQ`awCtnA7OjT}Y&*!b>;X4Rlj@TXe;Wkl@vNs5m8 zYlTEJS^4p=YwJ*l>|zx{SgE8#3Kywj%XMKhQbf5L#eNa&?%n=SA}s@y#22TpJYwez zrU!@+=9yP6CCzZF*n4|HR3j}nrJIpv`F!($P${@QX% z*T#_y8JgP6i_}l*?e!Kp&T@{mA2MPmez>%8t>j5wGfzzw}zk+ZX-<1!=KLL9iB9?RG%J}a&<6#PT%M^RfBo#LXx8w7K zK`fiaN(Nz88seEsiM%R{>lm+`(^*k}M~@Pf@IL$K*3qed>u#h7%9o>nnPJvTUD22i zLh+xh+D!sbr)k3ckL;fh@E&ZlHZ&_TCL(J1_Lo}*_#{!dnZOHg6=1eMenc)Rb^urQ zx;-G+AA51GByYI7xuK4Z=>v6_bBOf9T7l-*GSxrh0kzJpA{MyPU;5EAWF2#R@^zl?APrDNcCF4a(5drN3<5!lAw;f#6F;K=rb*o(7YF$&az z!L%Pyzl!%&h@&}b_md?iU8p9+GO4~)T0QuW2s8h%)y`zz$h`_aHyAl?4wK6!<%c&$ z!KiUiadCA7!~ox3z{hmvmCI&$uk~nub?+Z&nzG|-8W)$%5basVEj6=1-5x;S?hJ1E z@M*Q3pk0Mfb7$v!t4{f=m^DORQDbKOZFHMl%7m)hN+;IjOcyqrXpq`>QTn)c{l0}; zPWzx0A3x^Kmz}bRbaKt6?mU!6GlT>dV}W9Ek;hAxY7t%@n}bEh&rFTrWo%Z{AD9m& zHA&w8K%*9IF)Q+z=Z6S`{7?XU$ZO^S#w}~*9^v`YbiYV!nWwG);2aA=ZTjSJl&Ghq z0I_spURYau-i?)m3Lp)i&Lk+=hnxJl`fcUASuNV*kMNosN-~r%QpY$a1lC#lMaVj+ ziSn)X6M;Tj;-8~u=S+>s7&%^bg<=mqkL$d?Hn$5UmRjcp3y4KNcQ zTAkS_A2qXW!fD2fX9SCAO>N)gD(DdDFaJp-P38)7))NkBMSMS5LXx$da(>!rx;a8y zw47)TPf-VprseXAEHKp_v(u}cqv+buambA|U`ed>UTeo95e~^`ew{j>USH1x<)D!KC1D#7ZMs9G)`F#=Uw`VV`8(stFoY8b&Gp7VMM+KCJPX%1R z=2hP(!g<(!1I&tllayd*{ddR`*OS?c>2<;3lGf8w`^1;1-fy(y-hfxV*A< 
zJH;X`AmIGtZl6*KH)=ZN!O?~qL)lrdkX!Bkr%i>t8`a^Qnx^W^?|vOG5PSXuvo}`1 zfs)R52jj>SWOR85 z>fXCbIFdm#-qeVq3YgxwI<`uFBmQ(|CT`1RQD zuc^m5+(KqK|64kzYqD7k_r$qE{m$+Xk^^137XAlDSr{z`jhwdgOgoq--VLgTLqCC_ zCTa~$ne|DrzSES;6ywNbx$D+3&HKApp7QLS42^V+I+F#b)flxM8T%2q$8 znrpqqAl_h-=sL}mj)(?h5o{p1)an5YBO+Iy6H zEP9cA%^ut6$+&#cSjf>4Q^wh3@uBdY;+BEnspSxtvQ~wS1W8`|3!Qisy4=i?8T1$t zXgf~K5C%OI7Ovq~7K zoDy0)jlnY6ZDjAD#f8=EC-E2DW)#W^B*bAS59{$MvH4|WBm=EthA3L8gO(5p5D}OQ-C1e^rFnTyQdS;o+_=_#Ykx)o8)`Ob*8@Ty zRzTq7YHkzLfS(->ZX}nS_?jediOswz@*6LP=%9q!2uBiIkBQ!$jA0Uw1u4d>iz|Bk zs-h38508hNl-=O&y>HuN%e4_LZOLV4t$zfV_<8PQilY z0#%hRqJ^?2LA9&Lyp1IQ(!Evm@B5;4oYRg=zL$2dL~lDa%g4K8=ikr^^E;FEg|9V6 z-q;0JHhg-H=WKX{`J=GqO<0J170>R%N#AwLh&bneqE7Sg^RgXvmsrCI0uN2H%UJiU zQ5WZh+7jMgQYx`kht+ENT}}!7=aNJj*F0^$4JM9miofe9@q7}%2RU5}4H%L=^K7bn zaqV*E&q!@<>dzkz7G???EHQujV(M!Poz$7!>Ny* z%)-6+2KO!G$~rZHz0cp|c+{SW0&Je<<(L#ZxNtdXt&c%TK0jZDw$x_&Kb(!V&5wj! zgJvQ$KcP7=Z2T>Zsn7E*`z_}45i0!0K<_L2)NlU4e9Sg_{)7Jn@673thy3H@!%mSC z6KZz?>4QW51XM(o4#WdYJX(jrk?)Pj@8cenZNJNJeW)yX2T;7t$!}l0=g8x=Ze$~b zGObWH8xnNRZg(}S@BAWfVZAsSDjX)I?bpDH)0ROn!$f@8FXP4N(#Ef2$QDb?{+c$p81PQ8M*OC)U7!g!FzjkrMQpMS3|v>5o-L;vRIbr9 zHVpQY;5MM}2CADr)X4>^Ur@s}5|m8^m6Lyc^S|}-%bzf^g!?K)bIHdKlUS;s;{EX* ztJfB*mtofrVz6l+REilIC9{yE*E75Pp60t_xHgi2(=&XBx`2Vf zQMxd~Uxti@EMFcSSe}~AeOFR!cO2Nb!1)SH1MjPSPbbdmTl!%H?qQhZ_>+-u*AtSA z!{Kst$4<`xQW>R_eyJU8SK$n~$cPN@uYQI8>q~!lha~2>^It3W5#e$!AWf)ds{xMM zVz3hfrVAjb8p&vVtRGdsML7e>DUg)cK)+~U;wR0gC!o!h9Q4t79rF#6ANAW(lqhs1 zY{j$%q7B4P)CAxQiwuUQkM#7=6Kg$k-#$a(tDfO;>TrDX*fUuPNbB(F^Ln$owf7>C zAUH-M0ZI_=LsSNK4OjahgnWtC2t`5oP?>@=6_7SREoS80lJ$P7Yy3J&3D8e|9vbeK z+lBp{b$8P*X#QHC{_%2i%wk)9vB_Anoi)bxyd58tld#gOcb(Fs!E;C1s#K!xPC6Dm zAYuN2Z*OcEr!2v~SGrJk_d1P*SJE`Y89F~(-)tevo4jcX4ld7^RJYwg^kSB2o9l{g zqrRGAW-4RK{7ji(86DdQ0B#i?%VpRiqpr09XQ3*3_zGyGcn#dsjMT81K~8S&F4mo= z@g1j#6?h8A8G+0zNBP#Q;x^5uk^^FGZ0rPhe?4#3ybtn46V${>e=j4qSpV$h-2b1+ zoP(pfhz?bspeVqGf8cI?pNmNG7;E8a^zsknB1d|xIu;?qQR7ENS z#M1Qs17Rukr{bTS`k1(7M}*}&dEW5rVTj$jM@GEVk4{CbVI|PfZX;JTwbj)Hk9Qog z5}G4)MtARd_%)Rl5M-0&XKph5*V!y+?X6kRo%TxGEfIIgDm=1l!oZ4$FxW7Y z%ah)v#YAeCE*8aZeDkhFHHCx7aXj7-;?1B!O5l%B*Y`GnZJz#u>&UqIINCv=YUQw< z{g0WV83^^leW<-aFU9QckwM_F|2CS} za?`QkmeK?uHWSpA@ad(=coH~DSkeLR7sfB4hPsZ9ddT=sQq7>qrhgBw_T;#WFbfQK2^ z2c9A^-XO;nu;ubv!v-k>)V9EH9`onUcM{*dXbwa7T~4P8hx+~94q*)02kNV4pz3bR zNn3kYAh`qkfXfFAKI3qvOuNiJy}W3luF}*cttSz8Tj_dPxUQ~ zhzH1*iDvG41+@(h7vF5aqH!U2j4F@ey~QWktO~9$$HY6f?Q_Td%*_Tp#4gNZ zMM3=V-nZgv&IPh4)4_xI1P@JaMNlI>4<-Z045F*=N` z2D*^vv!YM?1!6Sii*2&Z(i*icIxU191U*KGwQ34LPp8V-zHnrv@*NIRHw6jB<+V3(En7-5sBRS zt$e*GEvwop6bDl_fH$=lKy-<-_l<^*>HP59lKu44nPN3(@niC;P7qly{_`KR;j;MJ zQ%(FwF_RGUJN0?Why1nHr7{w3l0L=9Oc(g-qHJy8E=N&Nlgs1SwzsN5G$Aar50=}M zpX)fK`SFKNg4?rYQTZaH2!*QNYI{A7XzOo(+;C{tj;c^AnG&S>k~mugaUnr{ZZjo%BXE827RU~D_=sdRyVzQQ`)1s&-+I10CyK+I$#xE-nbsZHHE;V2 ziEoH03Q_IBWRW__z3NA!sBn}(t%1wQ+WiK%LjQP=c)QySQvT5B|IRTjD4PU!EGTJB z7BL>S49HG?{UZhEteg5K>+;rp3Q0{w9hpR1o*Pjtjw5>CR?bQHN_OHZbZueB zy!6@mi+2e`pa${5KUR}72CE3MT)9{MOjCMUCxiRTU?4-|Fz)pC|19FOSW>tS%XKHx zK>0%}`i|1-J~HPED)1JE_QrI^GMK2>X`bdjC;Z+?DCx*O-s^+txtKst%=Lavg{Rjm zhc<-%vqsej>ELu?_TA{$@|MAJsFb;nAQjW4RbW=cmEx45rJsqplKjc@*_a%w3RxSo znvs*A?Z^!fT~)!OGP#QLvXu;>SQokZIswlJpg)KsY^u2k0gOysVkoAd+Y9wbgO3)_ zFE)BP9(0c71kI;}SZrC2x&9ICD4k8MaZ*FB&*A15rh+MU4rWpE~ zi`Ftmq70N8eh*D&SGJ)Qq`&B`r9DaUHv0!^(qC9k1;?dFo95WP(u5UHC?}N_OTW;X zmZ4WlWtNE60HQkif=iDX(N-|d;fCr5y3sn)+zYo4j=v}p?8A+D#oEGI{Lddgv;KTC z$%XnGdGYMBT|#&nzI^yvI-QoXYoEC_)uGC|T8Au)lY3?eovQ4feR?8>`U9~MoeFQ0 zoQ&+10=e;*$*E)K!i`a11&^v!R6g)XCwAMJ^SF{Z0v$B5q*g*n_bChK%>(I_t=kTz z(=mphMbC?ECgOSc!BImGl4Qv!eTEK53m})oJMtaR7JK{WnN`g^H1$x%bW}R)!K>78 
zrMd)FPLlI@^1_RMpaXpz3FB*Bp_kSA#boopJjp7$?S#0W%Xk9=;rZc6Hv3**IrLne zAm)md0Jd`eo`yxQ3Fb>^-ZG<`MtK`V<=4Qm$9w1dD=4Ht1M5VecA=ovDLFw}wx`Ak zI;s;8s{N*YJJ*Iy=^N=t#C`#j95#Nb-s}=MBJ)s>RB1kAzyMjQ2VWPcUr!L}oc!s2 z%$(#?Lt{?9CaMn=q{FWsG?xh~ZreYfS#RD0lq=8XJ{MpUR0IZ`RhLH;Pje1x( z^a_;)3No&y#&C;cqrt<_pU&qf2^I-V`GX6N#RvEZLh;XDO6>efs_y+RkUWNI4M7u3 zQQ_>*E_pL4g*oXZG-xx8lqE-T!}j;%>y+Y=KVDOKh1l0<$p0+OX>&;UFgMD)sNLZM z@*AG25t&39DTk99Q>C0HxY{1ZwMjp(HRd(N*@Dx+*uT<0sJIuYT&i^ou?1J)=gx~RIH!5C9$p%O{Ig{BNtX{UT{?cQWLTnpVlB|L*bOh|>Z;mM-WtBcWK>@x>Q+LH`_Gl%d0=wLe|iL~RKTH;Bu#_BE7i=ONF+>f>> zUrwE9lgle+JV3sskv2o|b3km(%4v#H=kH}=D_o&_c8K$_RxiGs){=X+q-nD-ZN0?T z0J)4@X?xpeOX2OL5F7|*e0NV<`3JHw;zM-k;|zHwLR~XCCBpHa$V3FRk<*oKxGZ8D zk|!tJwiEoDZ|^?>3Syylv{?fr?|Py?IZ%mNvU(NYC@#-yO{Z5j^W!CRp2O=j83?^j zBa7^9-zg}E5etBGngYCws)O1#ZfCO6iDZ(0rPI+HSeqKO&yJnhC0N% z3ZVAXk9x!uPp?HgjCp~YwhtPk7Z#8l$$kVgIN{M-?HQ!`$mQywOV(WF5Yk&(zj>7Nsh!N?RUNirwmo%l$7VuyI=)o{5t6ahqhL21Ad-JIe?q5b>ai=J8*qrlxjCXO`|f`#D>PKvWk(fCe@G> z^8kzYHtoYKA=ibvh&LOQ{R|(Vnc&(K(etREfKalI_DGY5GUA6PoCFhZqEr86J<^Y$ zh>vS&R#RmMlk&!E2w&2C`Ol=E&p0C1$%4&5if*)o;&h!bpNJjSD_bOpOmBo4z^ilO zhyN8~14Dq5!tBmtj_?Q3aF{h6O!7VN!tA%1N4e1%9~q(x9V7RyJJI%QZe6_T<)aj2 z4n7PT--^*Jw2oIhw+k#yxp!ROyp<3x-(VG_h^d9v{D*UIB zA>geVZAq8y$A~Ate<3d_5p;%W@=dkIJ-8P1?D+0i7mubt!!24RgqZKcjikvEzE?jG znh&!&y2}~R3+DVl6KIY{OFf-X=957cZnn}V2f!H@DK9NY?WubRh$$V1i|G$hM6Mtr zCwTxtJJa%pVIu`UWxPFY$^Wa)8VmO8p=p;%)~nSZfh!_PB!S$2ZKVA3dx^KdD=lYA zSxNrvpwwFb*RVWHcl1ddG|bqSd1$mMvfV<&tJ`F8YW(5j-k!>9R@p|YhiQBGCC8Dw z4v7lbkEC;op+TDa-TP7iD~u9oY`W=8tl|Z(Vc)Ys8s~)NZ8sDf+vPN(QO&a4(3zc8 z2S1{g=TBFHg+7v4_ACE=G-=6vs3U|Donj;zNQDuF4lYGK(MCCbXhk^pKlunP;+$ri zr&lxf?UqC2pSoUSCll_B&BL{5{vb9jC&+INBWF+?R2kKRow2jCc>NN82n)ov&D#Pf zd)a#VD=*?)=uI)Eq^IWt+ozj#ciNoc2^Ei-nuJD%xOQR$v`~kbr|Po|C2hM_T!Oy1 zs=e{^!wbmXU>?||SmX9Le)dYx#GvMIdNfzu`t@NM{>KYg@kmi}JVvF2HaI(tr?Spx zk*b6Q$~m~Z{gE^)Z#WNNC+}U zEeIwW$K9|#>tPZp9}y1eO0E?Bdw-}oI~N>=gq^q2PdMm#dwYmvJs!^|g$T$cFsV!E zz8AKh?d&}fMcDX=ICOu+G&G%;)g)IR!&pEXm~-!Gigj4QG@_-&>Gyu$F}?D_M5w^} zE+SEo#@YtwKj{pc*j+ssVKxh)3UlEP==nyFBhpH>N(p^%&%d%|^^f_zDMxm@%^#{1uFnZkuKc$yW>O)i6J7`)Pu1{z(*64I> zws5%QT3WfySch8N2IVrxM{44iJMsLyl(5tu9EW-65I(NueyeDAIt|F{ba#QV(|cI; z6D{C<28NxV>R64hrfHhQqU?r~V`il(tj{KPUvnJcgF3UxGEgc#JEa>u^VUw54}Pu` zcj7ahd!$v!G|%R;y5blXZ2v&5)LUh8{qx)To;GE6v_>nI5?B@b{SOybm)G}gfkx)5 z)!KT8B6!sk&5HDAT0lSZK!k2n1Zb>Z&%KY6-+4@`lYq`Q=e~0qVM*T(kbVNjI3^T=8`C9GCVl2Lis!r(qY49Ebf1c8Ueg2H@Q=Q7#kf0Pn@?pGz=v zZHZ0MQXD>}aSTU@?J3Wu`IfxWEzN+ALDu?qvXJ$OJ?1crbGj+d?M)n zbL$OtSl5yPFLVbZpHrg?mLcl0uwag+t>hT5TYN~U@kS=7l4SySxw}bAO=m<5KJN)X z>Q0zlH45C_2MT@D4{^MaE5ts@UlIwLn=z&n=`Re8_`a8wB;VDpEzp@MELMHhcw6bb zFf8cf>^<+>{PMUeDfDAtwzz;#&IQ@l@xSIa%7Aq8JI4Gg5sNnSU(|{8JJw%!V@lMg zIB#BLnf2wKiehJ5Kl!)JrxY^LqpnsXynQTs$G_Bv)(nVpC~e-1$+DV-hk#mpr&3pk z*i7xuU$eN;ks+7;I|itg;1(Ti0$p=PuBJI(>R@@aB83_K)5)nM=OPvazsJ7Vc<*dg z|8i4drH{R)lZ_%)kR($cQ(m7xr`H=%_^_XGbSD?(E%YnID!QLOA!vW(KSHb(=l{cL zeugf~?+F|Q4o&}F8oa#`WESZpdtA04J?3k-yJ1Me+RwJXauH=8iv3x^yx9vTN0eu8 zqs})R`lZ*QJ4AzChtY_H(^OnvW{NYElqEks6>Yqr{SSnZUe7xXE<+XOW#}m3g83Qd zYrkHnF%2qhMRGA^1m=L#b~xjZIAbIg|>pk<$6r3i0N^G*+m!znZ9>2lI4!LA1anc6|IVK-jArg#p8{4%L1avj- zO4@>sv)LbI>+0;{(n@`wcVXjKcL)2-KN2Xa{0VJZkySfJLp+CcggsRN^Y$>%cv2c#!^B1rA+Uwh{dQ@ZpWKw}zlT#NCgfHJ&s_>?IpIjkqDkaL(#3zD)e_u1ZA8!E_%6vygEa&x+r*IN+PX2+5 zB(kwK=o8(AlGMXdlpTSNLJkH#@bjUUWWs7!@d#rk>F%)IJ3X#Fbs58NWzg<1s>;jS zd7YwdXShM5QMl;YKP0#DnF=Pi^^HcxYY#8>S;7AY-h%bgb?RW|`Er+`sEU3S8N6V| zg=GDP+IW>wU9?$ve^1=%(t-Zagk_YCgMV{;tCwU}99uudmXY6?z zGt$17A}11jOfNYNUx@<7I@%-c)DT(q;rLmuUD@B9l(?B@s&`TrxMG+g=oLCjQkQ+1 zsBP$q9;Geq0~d+=cCY6^$2KL43LI|r!OKVj?n72KJrN*mQBXT4=(e9McoDLbsq&5J 
zzaB=N^u}u4na1b$IUJF;DNEJMMNDoPi#ko=ox^)=k^ey2voq7^oXHdHi(C#3#ZKp; zwpu~8e{e3OmpwPQ-2ese}-NHkvTu9olTx^CpeC zDGIoWC)j#)8sVg_O)3>jF>a@q0UF)Nj^3D~5W))aA1D|l_4Dgfat&<%Y`!6^se&@) z@D^{j5vpp%R8|3$b6ro^XIc5$LA}eGwP}3;yXTRFQ;sW^`m+x!3bYG<5UYdHsDvDq z@Ny?=ofenJCRWdtifQ5YoN~ z_r!vX3;Oz_!(7d~394&CkVm!FeVRIDuyTRqYV{vsDncfzp~4>L=iklJnJNUl>WG=sZY zEXTSMxB`3~#A8$Q>6+b>1c>G5i5+X)+oi6;gRXv=RN&hw#0xMt`xT0H6kmVGADviC z$td1oup-{H4V!(KPU%QW>XaO7@MUY|jaCWzufQSw61&NxZ;?U zP#az)`@2Wqu|Or-!a*B|L7UMy&ajvC|2?e%26QZpuHI#{1z6B~BKF~Bk}{Mf7Te4w zKn}G~Quo^4z1`l~8?s2@#~?6hGKK}u4sIUUV*@!?vk6mPCi{Rk4SoV69PHI^OcDBh z7E&nds?~C0wU)9FFPi1PU4fkDI~GJyAe>~y5FF&?@k1!oOG1Vj_UbQaJAen87C?3e zihSK-aefDA#w*dDt~bkSl47NSvsteeYlzoN>n?VHWl54FM{b86brnM9#}EX~W-n9z zsggVaO+~T<_b$#*^1&*s!L{Q-)pc4zcY4Gh!uck`tKM)DqEbeqg~aq(W#=JURuZ<^ zU=p3T%eDyA{@HRO*FqWgOeoVyZp~VN;`fHN`FX~QjB%3AU$C9MJ^_gd4>y zngWz%Z%8rTPhpq*biMS-Br5vRq$1~;NZLv`bocsvE&kZ)0yEd|5iUbD4po=UXN|zB zU2CvyBzmCKz}A)e5y~E9=)D+UebgSX3rMI)aVF?ONg@){BtKXJms|#nK%tDSjDe{kF zI_BCkm07WV2*;s3S6uRXj&|QMDo_e)*{;f1{_4N&Yg%y^x-GWpQqx{}{F>g&K(ThI zXH-kY!_3SqJFdL%HGELb5&9aE$vrd3TJ7&(NjtteXD>R zRtW3IulUwYeJY*cecyghKt-U4z(&&R=|uTP&{Pfo#?Y@&@QK+JTR>@#JoKXqOqb7_-!~69Z`Bm;OFib)xt2Gn1$T6(A8ADrUf>!ev5KHYlK3%RN($El`_Wive z{YKrdrc2_oSs8C;d`0(&)@NLGR}4xC;}?NGLU&cJ(D3OS-n^u>;8<#xW_b2o**xE zUZ*)nM2k|j@8O*@5c~e?>S}wIzLndL`PaNCD}yJ<7&xAcufvc&^)7~^tsjr=!dR>5 ziWmFRxOQk__r4yX|K%#9{Lu9602fW?N|>VaKhWQX!R73TU&H;su*_BiD{IP%b*$Ab z5e%Yr{LN+Je;{4WDJrTk@K9pKu*FyD z?2BnhjLhF+|9Af@0}cH0D=p_(WJwidGg*`=AtWk9q11xD!;W?f@k8gO>E8B4*=4Pv z94ej8a^NW+dM!Ot_>q6RG*&z}MO}N}d@bf*)n3^L*1yxAsJl<<7xg0#{Y$gJ&8%?u zPSH2(u@XNRAU;#YE+7;L{Zk`wczpDAbq|FZBce09OA1PX3zBT&T`O7*TfYsMz}h+t`0 T4oc`<)Zb~aVR zZSmK%$w<$tMT#nCJW+Y}LCs%K!$geax_+F|nFaB6D#w&@SM9yDSy>RiPAgK{iIfn| zK0-=9S(J~sD7E?g^zag->73?XgreMLui7WI_9S5>3YUc47XJ8qUUB|;!k_g(U3Ot`fRD$WkV77ry>*l@ck3F%{rAtf$veDlj_n){PdSsc| zF0qRY3eYY~FrnxfN{|_WEBVzZnkM?7oU%$&gBmwfk!kKY{@>oZOQi~vvNo-|YweQL zJ-N&G>ZjOehdaFeVcha@)#GmEi`hGFL{XIA8?fQ{vR!|eQFvduWlR;RK=HTc>tiWx zm#=bVxsp)2Nae*m%iF1cg}I9fJ8bmIXn%v>7s5UIwMhmHs*Gm=(Mpod!m?2!kE;%N zH|;=!&mzxCOhK5@Y;@^X`)>Sl9DWKgjHV>@DXt>X$Wtj(`yKPtuy*Dc^Gg)!?p!oc z+KSRJ9+iKHW5^E7yK`Xj_0#nY8AUfcif?T$E8D%-9Z@aBiipRCQX9%%Z@1aw^Ux8# zZZ=aq(9~g>OGb7MvL%thKFmhs-RwZvo9i^RG^G7|owmrqsl#ZrZ_{ zyz|R^bi=_Ul3XYlG?B1(2W>(cVj76Ax0Tcm8oqyeR<@qB84b!3r#j-JBC(apLiZo`>1@c+He~aVk75rrIW`Z> zP@Fduv7Vlssx9J)JY8@vZgJBk1~A<@LKnp?ox5`nPvlK7ELsgQrB`*gg`J!f*Ier~Um7ud6mbs}ZIZ}Dyh zRFk2MlGX>f1>12!$Sf#PDy(d9I?b_{5@LNQ`jT8A;pYq8^VXZM%yab97yTnh<2B9x z&%6*k4wEEBBwt`0kQFJxLN9d;MSY+hNBHS1=Lc?+Hq5e&7>@#3eEOx$-4~vj^&)>V z^!AixWzdUB_HVJwmmkG0^GvQ{M598-VOB$5cbSw@wSIKB)%4DK=R^U@sd{RzaE9acFf zWJ@j?$XD3{#s|K|qybp{w`YLIu70-WqCnflGPuf)Q)(ZD$K0ps=xOX{7CWwnqTE8R z?<2*pw;1ZTH(@IjIav8|nPQQpF>+l94@i9ob6_GB_#hrZ=q05Iz>BFZ!M1;*Pfsaa z9CKJP&%aKFr^IQ~7|Kzi$tZ>A(pvujsy}eYLdPV~C|M$Tav4!3P@ceSuc&*v6fV?T zpWskI2Wv+VYY1X7*&4AU>8k6G5M!yCN^50V6Ni?Q7$4v zC?Z9jf113v)t9@7aq)1(YPha))Tl{mJP|r#_q`I$w;e)oPSl!H$KJJ zKHI;TIO!h`qk5Pp?=7rW?|=h~mGlVY#U+C-s$PWdxg=8cilLjyAvVi%kAYiyp&r5F_A3a`fMq1 z+k&}DY?!$4M~-31n2jh1?wwXwV@0ip0Ezvhh|m7%903F^)%7L(Hg+c7A2D)N}3j;Zu&#Fn1u z619C)`cS4Ok{4XI&{7&s>iq|lXnTqG;x1rIq`VhC~{8B82$A_RZ(;g zvjf04*EdodpO+LjH4l{uV5F=Mw(M?XSfNE4hL~=2>1xPEUN*e4F^^J=vKP-vrtX=m z1I}ld67LX%Fjr&@2~f4dSX3tYeh32Uc*o*2&9NAy)>{dD!JuTuIL%~6i>g%WIL=$- zF=ZOZrM-W*gE3;~{AaGF?^!XWOy9dJuLX>R>YJ15MTH~+qzWNd<%e0Tr>_k-PI9+9 zcS}w(AYH6gxgGO-ym3Tjp*kk9P6ja}jAFK%v4-lamd>9+^(IC>4m?L69%$!ZF#SX+ zAz3XXwRQj_c*PH$fn{TQ_15@WJ{+95qR2wtE0TYGuIxV5U27d3Nz|}JWf@Il80PJW zNvTmTYOs6Ral4Ry+}^OiZjC=#kd7lsW5|4u3J6ijsULfAhL#>m*s({=T{q?Gu#FcM 
zQ4FUlZaA@CYmD^EdCvDH&l2*qjh^<7Mdv2GYEQ+I_ z?)ZNO=vQ8gV9G^3)tcYxk}<yr*u&fDo2q~#>|be0ofL)jlF-^o;m9M`jN(*=&Gs$xlS(l?|g9e2gro= zK5H#Za~aN}SBPyZjOF*^sFNB00Cl5_**Bvg3^(P0Vxpre07W)#wl{VvB{;E^qZ0O@ z0^fHxlaVZ|`*iz@jj?atagNk-=OLVP(YP?mfW<60`w0j>HepPn)u?P#vXV9b0OWs; zdS0ffDf(6%o8~#Z-v=LR_r?{eBHv0%%Dxt>jQy&g*m^Ivwsc+BI$Y6$a7FdPf9mzXB|pu z?!_-Y93|V;qDiLAN_~7$7F{AFX`z2lzT4SZf~eJi3$Wyo*2WT$N-*5r`iPNHERTE_ zeEObV5Y>c$x|DnFl?UFiS75;VbsTKtbazFgrHc#fJ=TqSjxD(7f@-?SYU)K|`fBTF z&?h%Z{hWUN6g4#D1ShnlNGhCV^yetIT34b8~z`cs0v7<;!@VQM{dgy}r)>|Q@#Bcl#pCTb zaCYoz8gnDXlO|27MqQ8}(`Lsly#U8^kD4bw@9dtZLO zey8z25%V8ZV4%QNtd2I7+W0%so`H^0D(F_JcM{5@n7xc_9es7NsTLf1k~!)TnTctw z->NR&d!jV`y9ZaBQl!eyP9QgmF2T-ex_{4RIAjVL(Oha6Y{2XYa5%HFOa)A{yqF41 zv&-ZTOt;=J0SHXD>wEzSOt<*R0S8RC1Qh}mOt&6k0t`&IFRcO~Ot)0|0v1fSZ$$$Z WOt*@W0~1WQqTK@+Ot-%&1T+TW07mWr delta 20231 zcmV(#K;*yW-w=r65Pwih0|XQU000O8gB7o72bOL5oYR(|AxT*fAy~qfQt$@0DrHK0EY{J$Av?{h5I)M zAO`^85&qi-{I3v^5Rg&e;ZOnpaYa7>;Njun5aAI1TZnLQXmIcdxQKYv0HlvJoXA`f z_&^O5$!`SQnzZI_gi>J;R30t&!f85wXv6k@!{Gq`!~Q=y{C^-w0Av)j|G1L4Z~*xK zkl~U3dl2CO7=Li^xYP)EoB#<8ns107x!l4eAwW&@!Up$#eD3h+zev#SE2$!_MvDPt zj~RlL>whZ%Ooab1a1n3;;(+JA!xC2f(FY`TR*VO7d(?wI>!&VsIgwzCqO9zkx{(Zl zk>AeK;%TgI*}KnI6cka|TLRtF!yez3&OCBGUs{p8CVxhBSUzD=m*jjW^y`lyCjHbX z(rlrLWN&*l$(dSUR4iz>UW*G-wQcVydQyDgy8F~cs1EwJ@kVWw_4$y*zN$JeD!~HC z9IeP8hZ$wmO%jCNM@XN<(z!?occK=%uud?$CexgL&M$gDmJH)J=vf!vmkGO5)TPei z9BTDmzkkh|5~vcskt4+qFaWk%CpV@L)4n=YBCb>rdeO8ar<7MRk!3Yy*aTYc)U<#2 zjB_05GfC?27{tB)k=$|8r%J17b@_w=YLFJj1LYfKqKjapOG85MO#_hixFqllaHsS- zwh;f-#bk1)+uA1IrQo6kO2=}F8}_c z8Px8hqJUqRYUe89oMEF+M_#2B!hA=>+?E-jXN!(B_dRS|n6{n#Ok{@5ZBM$MES~BD zQ-9x0OkS@{#`Z|Kpb@bb4JTdI6FNeLJt9tfcIV<%|LO0{;U$<4UPYt$8$-&3B&bYC zcrMq9hJ5UeGx6p5A3*)Dww0cqv0BD6X1W1lEO8ah3kPAmC#FJe1=g2d!`{4tR0hIA z3TZY?L-G(cykWIhA?wKe3?`wRR*5x}1%FjId*=JTn0+aVD9f; z8kyOYN-~AXX9+hlr#CgUy+q0X0U@K)tF_ zYG)oZRDr~@1E&U0QU3sSeC14m-~GSZH$m9Abo59#FEo=-!2fv`LU^wL^y^$39t+6qJQ8&BoH@lm5v`I!p6c{k zRlVB%nZS@txfVCt*1?Js{f<0=x?NSjTScTM_uY7=uwsmE*`@@PWty<_a{GBEr=l1R z_=}G3+WPZe9pfZHUcBmE3V%MBx=aC>s2Im9=pAJ(7zmQ8#+S3`bHYTZz`Gt^WWV(va-TUoz)5u?s8xtuNI1 zyfa1uK&WYShw4#O_{sIj#T)?nT{}$nAArKStWY3QMPQ^4+NHJ+qgoL5@cJ|6^npkNz=vt%lDKZyF(PKc3NshDB z(Q%a-Ta86!DFe5uV6XN$KFmS(TPCe z#HZjM7Pa2+GsN^QzfF!}ssL1sOQ%-tvJP429J^W|dsBKAe19)~wm1XH@i9k_UA;w+Ad7Otqn`);9)-siL@^<67N6<@%6wA6|%dN`0D< z42>H7W!>uOs(M9g`&bn0->ESjiQ1H&TgNP3;c$G;?j~(m-^AB^*X%VZs^>||MQ>=U zPNM-@U)fAP5ivPO)&_^h(tqUY>@94JBp?(sv>jXCPk(T;^x`Zh(;r!#Dj|h&R5<)S zxS1}YP2xN}Cg}Rl2^Xg{4=i|2fT`^((oQY3oHncq2njFp-)if5EA~6T22>dW<4H{B z&z3e_in_XTU1uFAOZwHoZvOywmN0U!ug?+9A`ROI_mentqB)b0l|xoZYgULIhr{Hy z)b>FX?tf=tRy@>0SxmCRxyK+hCC_pNs3F8~$QA$ExTE2O=~(e=&Ms;{b16DuZ}z2l zlIXL`?bN(L&`Zm%4=Y@i+MZFaU0S*_YmLGF@c2*`4ig`tHrEi1sDW|Dd>9*u9&up% z6OAH2`$Dnt%j_o1B-P+kanK>+vPHIR!^S_rYJYY70lQ&Ut@Ct49ISxlKAD%mNj4EG zuh9ZV*IcvK#P0hoM_^b={{3`vo26D>sz;(&z_0ElsLSKwG$l)fvW>-W&UG~aPwlnl z%E~!(??(Eya7}+3ypKj9HUay4_`->B>^ z`vn>v+E=O3hY&h2x&y0dp$u;FNf)|y(FA$I|Pjw8dr@1YPwt( zwOD85Pvyrx@Ls9r5A@a!?j|1d-_*9nDE0p=Y%(>=@a#A0nzk=H zbJW6E+I{~4)W|giv4>=EqI-2Z=YNo?!-_2-HW70-ah)2nyjGmbBIT71l#@2#PNB~; zo)5>e!Zx{7mSdIyhcK`|4mWFOIGQf1MI9pvMfy(SUwj!Th!7bFY!nS86v9) zqBC|?6qyJU<&M~7TZ~hfndERHYT+%gdB!%A<(w!J^)+0=j z+p>uJP+W4!l_feLT^eyrl9(X>FfiRHM_LKYK`A+L=#w9Uun<~j$zy3p%jz|&!L&&~ zXR@$ef9?uuU8_meJb~^{Q7~WBue?OFCSmrlR)ot}jcNbpm9f$+segy4Br^(-LRCJc zh$m2!8p!?~H}CidP+jo=?RI-+*?pd?us7m*7+CZ#+PE`f(7un?%ADpJQ$)3->tkw6 z0)G5!GVoS#61vVC%6%}V5u}m)w=dPk$ZS<|v089SjhC>fPCuF*$Mr+8i)eR;YUIe5 z=2lYmiNQ24=9)p@OMl&Ed^ks24PJ?Mi5vE9)`Q~D6XCy&OxM&n{2#{h7c^p*y4*L( z-M_8J7J?CF=|srI4$Np@zt|#l-bVD`apQtCTz@8QcKGGKDP=*U5QQwwlOqX7w96 
zOXIn$zpPXw#eb}t*B1J`_xA}i%|-8l{d-lvYR#@|MGba$nLc~#`Bx)eI^7YL9X(4i zT<1jhrfz%W`~!63OG%KGG@{;BrfVn@hL_koESi=4;eWu274Z_e^~oSkcS^N6WuVma z?V^Sec@2Ban__jT&E;r+NK5!oTN)tRj_psXZ=EOt>VNnoRL}fYfXF(b&s@|d63!aU zjFgx04}fvS#R`7CKpQKTF>EJICmW(mo4pg`>CF3yCTx!z$5P||(3u!9MYpM#!`7=> zX-TwHW8Y?#<%buJ4LUjK1DvpGoYXRo;j#<46}AsZt&-?>zluB3;6BC~V;6LQc1D!v zt;d<%&VLpVDq>u7>%%Di0py9Fs8^TQCF?P+b-Dflh$>Yw!B|52_+9mLpThG7B{~ID zeslf`63ThEI#Jlsir#7G{Inr9KXUhMP%@;yrvm%VA1l|=gv^Ye!`D~QieDqfm~c#6 zTB*Z`PqP`kW?dX$B6{(hqRAwM>uy@r)sx3Kq<@odf3mpLwehu5ksZ;1Buj}!7&F#v zST`D}OWS=w5-Yq>iP-RDQa~0-s{^EI0>w#OGplD|trfNI{WUlXT9VZzlk(bdi{!!``mgGtzX99g=%=qfVx&D+W&9^Q`E6cCu3V%({ z+C>|L-3-hApTu@1j*@61@i8Hx7Fb{Fk+P1ip0-2XQ9EHO@rqv#Cf~_(T6u$+^AwM# z!-PL58?V&oG{&=VphWPo8l`57N&sU@tpPOY%t`dThp*({}Sb#^M(`bjoXNH6*Yg> z@_k`8Pp4%kMg@czq)#BH$;H+goGR{eIeR;x4XqOuOfldl>P;G*kKI2Pk$<1?Vj=8v zX@@h=M>l5=qol)u9Dxw5Ey{ zv#xT)UxVr!hlIv-eOk!IgT7t9dbbypT%QrVual z;q#lKzk*O1-!=NOpY}F{ESJVLN#FA)BNcF0$q(9ytM3f%Cl?8VShmZQ3?i*G#B-EV zcvY4+(cd{|b7TLGpMRt%;ePeeZKPEX>uVtoDv~3Im|-+Y-B6puz<4iKT_yp@v(%CP zC-$#LxKFmZTbk9`Q_=N&hpX+we3D4qOu(fN3J}{LKVp^@yKn4u`vH(Y_TpTr-cWUO zLmeH{C#qiOaOtIuV$I-6)jyK~4bB}R7C6%1hETI*9Si!4bbn{x8M3X+$j%*p$m@Q) z8a?`^k-+$ic!-;=<;R^Vd0PGmX*O$tqD@)4Dsi!-Je?aO*L3paZaSFf50JaDMWn=| z*#CziE1KBG%IWU{%8|~l_K1N}ppCu51@p4OiR~GY7w3>-ETA2o=`gx!9XD8rqb+v- zizNnageJ%`wSTEXT0QiS2s8hv)!uaB*rN&_HxMy#0fWmX{f9St@wjnNSy^2)$N_icIuTwTo1AKGioj?&G*(!nKWPgTCo3Tv^v>9UJL!&bv>!W72OEAlL z^$KSZr>X6mRtp{>`Q<;2pvhcq&U(fHu1*>vO-{9zQ!dIp&oYPWjFS`1=PB)G(X?D$ zlbz{{-y6`*S9EReKIX<8wj@$|am+?G!?lIq%c#vxL<0OdI9ilc&ei7v#fK=67+8E4 znbv&rE+e=*|UGkUUSzhNO8Vlh)^^kX!&cP4EW zCm5O~+j*6(dFT?e1oVH1yJ(|SKTNNVet*6-UusMR|6b7c7L^M`NJ-eOvi4SvAmA!8 zKm>cDd~=|6=|e9FEx&v$MGkxQ!o1@(aLymk@-=fxc5_rfdiqk#^=nb}Qwo%a?KiXH z-&7^Y#n3@ar7(lng+uB@!^kwD4R!w~`CDfBF9q&>Q2eVe@F)W|GzMI0vU}S~cYiW$ zAzp3uLLuB-s}=BWjG-RZH_)G=U0%*fT_rQjmP443cFy8zpkfW#biBH=B}R2Xu;>>G z-?>~SGvKcu+Vz~f{{WpsMu)%pIwUR2Scs!1NJMf}Z5`j954oG=8|DA@NYO^<==#fq@l=5D2y zIyWdn^xFg4=UGZTm3|hWX!t7Ql3`F}KobkYC}OZs(>qCIK|fSpS-XpTnMT0*$HO6o z5>D)F`jev#6}qysUDvc{j@A1vO38x!*%NUI6x@CuVQVAp<3ykAL&ZHf7G~ z#fbXj2UxF1%)IZg5-6PIV#U(1O1LB5vNFL3*pqg_m!|EgXX-c z;6)WMym54FmHdc@HO=|(dG`%l`PXlTu1TTZ#X$FKqXE(JLdM`zzrUs)mrx6t)uOO0 zPS-TEc?eO>$PVa}i<43G|;3`*LB`?uS`^^ix zz?s(!>sEt5YrUgIzilTQ7=1aL&u3AmTKDtTY5>IK z^(yN4g3>dNHj#KZxqqsv#?c2|xekldyr)XyfsZ3WTx?E1`q=az;3v@Z0DqI5+GqZA zZi#jgYG}HEpN0<(Qk2Ic#V=t>LCQ}~7M#w1M#OC%0M=oKrZ%5s;Ims%OmGZ~*<{%w zch;Rn=)R%8|B7joHsPyS*4M0pj>7PJvlsUt$!Rm%RQIANxtVzZ1rbf zziBMx>xe01Z?pK2`_6F7fN<6Fi7HrYBgcXyZ~cYNyh>c|=1C3ujqtS{r{;))5Xx)I zTfMF;Ct5eo+ye1;^WLXSb>Bpj_&7X=|HLHIHjb2{msEmkEa#QbkvS!__F6(^@;XUB zg3C(k*w2!#xPQ&am6M5yBF~;SlhPB4#5x_^M}MrC2xpPspIcjlwJ=eQ&ZTdF+jNMYR$-EsFqF)05)9ScQOap#) zJGc>FbK+?de;_jRCNFBa8leRfXv3XI>^!G<_b^6EJbxFb8?P^~>G7+IKCM4JA8%9i z0s9U%>0TaB8dAtnM!rG6Q$W7Vhcdlyl_A>Ke4&MW$wLEp8PT2t1<3?zYg|N2WY2;c z)=zm`$`Kx|;(kAtZepKzU-P|nd8K&UsaZZhoI3voUztCcY%T@Y8+l_DTiNjGIbO2i z5)_R?R)4o4;r6vW`%7nox9wx%oU^lWZND!o_taey45#otG)b=$+;hiWoR=EPc?U=+ z#nv6x>*V)2CGcL$Q)FBVwfVM~IQq!{ZXzY|NzlvbT4+EJ?3w2?+{+qPbAHBXb5nh# zKU$hAX|}}p=~uAlAB4|20lq4~Wa9~KDC5`o(|?GsIbFp1LYZ^4XRpWwJ-!ze3ST7h zt>`{;60I%*mzg&uH`F%9buJ3r_Z>#B9+qB_Smvn{q!quAIDLqsI(0IO^5TOXTF6!Q zXdZkGljBi)B@D26S(IZ^?B>GZq_I8)r271P7ur#q8=}9M=v*8NzXi-iYkom-VA%Rw z5`W)R=v(<)%;z&$_>X~JF#F7J{=g!PPCEXh)_;JU+3=^L)3oCrkuwu2cYNuiWBz1h zc$IGWBMe*`2iVy67Q|19Ps+C6<#*_7%0H5S$S>+ze&i_RwQgY}05h#ov>D>}%Meo(dYh=h1OUVn)8PDi+KCi^$NlS#% zok!I@zP25f;#mxPCtJgZ>1!8jLJbUio7dr6Z36>0m1gIwXkp6rn#P8qeiGaUQ_`yjbvq0LFKexVgC1CennG8mQY`XI4=35QDRH=bKF0^<6O&gwduTGvw!>$ 
z2RSW`9{YfTWeBKNVruRmz`0wLSmEwS#+=MYl26agoyV$&BVqL8;2^#WLs4=CF*-`9 z#N)EB! zkK!VA$0u(^VLPIK1#S`TyyGY*kbjtgnkFo)1NYUQV<;+)f|JX===|xrJYXzOQ~ZD2 zcE|D*w7Dk;Wo4=0`}HguI^jxK`{mG)z>J<9s>!G+%_usf3A-dy{JYslR?LOS8*q(R z79njREqi`6AtaE%BrYbaD)oEfAFXr-aeFN|CZtKC7EHbozS)tPg9CADVt<+^=f0jQ ze0&Ldoe6r`c0&LLn=Y7A{Kz<|g(RJx+4c8K-#x>Pv1IK2(Ff!ubaal2r7`|WL`+2a zs<^_t}t7b-w3Q7ftO$5PbJY3^Kgwm=BxDsm4)IIoeaF*ARqC z3Mc&vJDT2-IWmzk8Qxz*3V%bl*Z$CMNsLM7zgFyH!c|-$nL_nD%~0fagS~hlZ2*4l zNI_XBwf^SlR=6!Z%GZ1~d`%w;AR51{rrxrHTEgP8lN%QFmXmh0neRkf&Ktu4O`cQ!si>idBn7Kr_1^%6@_P>a3L!J>KHwL-E330_Fg6w1jb7wgYlz$ z2rGfEQEK!;h}S4BU?e#Dnsn@$fXu~NF(c>p+)p#Tlee)-`f0BtqeF80ke~DJZu-S- z!A)78ueT>GcIB5_jU~HS<83dy@Gv+DYP<$EDLk4z_mr(FB!3zoq!WO{66W-L2NR>% zmC5!4(j~I{x0x)wlBU_t;Kli-HVawav~5#hXjPG<9mD1eVAM1KsSU-lJMswI&LNuVD12f$Ps z%D_82_c3wHi;gUE^1S2MLl?VukBNS38lQ>Yz)Ys4*+r~w?QCoeo$NkgB`}BU&A;~N za?eE_<4#9UJxkVArbk{{y4eBO&wg*lwVZzKzDlcmRFvb_Kk*rhHYI0FU`h;h!|hud z-+$!c*MC%6f|E^^pS#QU-(<6(vA1SHbvmf%vV`9!t@g;Pj|3_nLm;C}E-!l57E>9$ zx|rnqNp1TUb>t2rr%AXY@OLnU^uQkxuAgjd3-y;=$0p4uQH}ytYewztf6SGxHj;Ye zw-mP*#aE?dA9Sot|9*109lOs2Z^6oWWUmZ7tbc`0iQ0!4a*(aHBLpXsMh%E)__+Fc z+@J%3sRSTf9c5>Yupd(V#~J0K`omm%b0Dgv#|V3Y0rL6%6NA7}|6LTX)z(wN9i=I; zxuDMEFK?|LpJY3}I&6={#sRaj`ZYn~)UE6|oGpcLmj$`Nf_!$jZ+AwCeZF*}lYUc_ zn}5@(Em)0_qhG2sowUzKVr&x5w{QKRT+ufjnovORdDUuFA#=%m0#8A5t1ZG%_E$)5 z5Vgskos_T!yUA}O@6#zv{o|izPMs3ltG;f{u!PeEsDZU7UX8N_aFlU#_$3A%4KcBp zt$^1W0;32}+X13I7cE@wrF?wT90eb`oPW=hj12j^9YYxMj?~x90CjzqX%i{KRFHmo z#q$Q!^)SyuDoiaKfTO&~ADZ@)%0Q*K16fJU^cv&hBt;f?N=Ib66xes4$ik1JLG>3>V7 z2AP4Vq!OllW0$JnZP-g`X!?b^_4a}3;5+d+4nuiR|lW2}-wRwQ`(PrlVR z3of8#nQk8V7x%p(uFEJV*IWE2t_iLW1)i8XOM=%LR*XTRbbuI2@o}fkFX{HT-y(s;1 zSS&_ewcIJoEUn?&CoL!5dphapgpMUl$^gUV~ zw&xpELB3@6gGj8e2804s(b<}*K8kuq+NJO>_&Sd@L}_~`D#pg2(UeA=j%S@XoLO4( zgY~mkhe6X>#b@KM*ztggntyY78N+oXny{o zlkE0tSz5KsC_=8Px8Bu+E!y$hA14Z&yQeDDL8=6*yC%vLL81@pw0@Vg;Q z-v3BJIU8rfWL-YEPk=z!FCQk&n7LN}7F&NCJ*-WoLZ%`2H%Fh=(;mOf7C7iElC5bd z8@K0CcIk4>YjE{rLbB@%w>tm347Cx1P26}q*sV_x~{{LQ-@ zBv23ktexWJ7s*}zA4Hn4IGKw?%y*9Y&B7p>|!*bh$FkJQ2fx4%( zeu&8VhWr7W=FW7^GL&$@X_5LdKkCs*DD}iW$%|g}QcR#f{`RoG+SBWuLmNc*Rik!{ z1U8$J_b?t@)qf7F0!x|u2vRa#TLtDu-zd%~TKbuoE6Ja|UQEcbs*rXvs~I`@*^b?X z#MM^ws7$Y8zwM-f$Tvmqf~TO_0dz-+1g-T~;YKDd@#Hh$-K8c3*s}%rn~h$+2d!fj ze%m<#CR?s!fqyjn+L=&r=yA=%B7wta7`#B$_D*(P<9}$yu*glPX1bxTxo87p4AO9g z;rECvc4ZqHLAtAf2AZ>UZ?k`ZR{f>b3}9kboN2z@J9T8)lyYiine-ctX(einR8F~Q zy{Jx+;L3A$oE3y~w7KqycD#|K;L45O@fUfreUve;SZ5TA|K-zH)}JpX1z>+8FP?q2 zYY;EPH-GxzirLKcefyl9nQm3q^+rTl?1BqJ@JvnL{L2dw*gqs8x<}zds*{nuQXn_p zDj8M6LXfjIiIq_5WBSr%+i(^|$F4)gY`o!D(aSQM zsU#kLVC=|~BxxGbprHf862K+pfo#vS-QNCnZhu{~5Jf$LF$jFdgY*OBmnk3cam2EvK3P0+}n;U?~?J zXjlxGV7x^Xt}?o5RCR(>ehr^`d~$xg0fUCJG0*gAmWn%^(vqcR`|F*+<2nHm+Gt(7 z1%EbdN?{~p(TBxMa#(m3dh;v5=$vCc5~aoLVFN^|emvcfrp;uLp6Q?Nr_8B7_0;BM z8>0GPL0Y^zn7K?)S?A&9+-BQ>a?QoU*J3RE>cD`Dx~k~1Sr;)A;#2)5){c7z8flz;TQXqD zUXq_xPK`3xLQ#H_IBNemxk(`&^W#08SBQOsn(WWYf;NYQ4|9vmo7w}O{FbL`bPl0L z`th{JOa-S2j<$z!L+a04jYUmyw$Mx<)~_sj)%O&l@$-2(26%Gv<~&Q*aUuE14u3)f zQf&=I0FNIcwo9FE36|g*^wN3R1^c{E*27C9kbj=EDRsNkkXpZttsFbxp;d6m$D~tv zqHcFR%k|(JzQEu(7c|yJr355jpkbr#MHP-h3C=HMvR5o_O(=FAX$fz;F*3|}IOfj5 zqTyy%l#e59zf>Xc1nX|Bizs){C4Xu$kY|{UO(Zz-&9TL_ddL`rhckv3Y*K{8sK<7? 
zmHr~`=yEdTj-6F0p!@DYf5K^jm;N$*eQ$r-`?Om$Q6G5#`?!Z@XX0-cchp-X4KB&9-F|iy9}d zYD2?@e2wuMjDzOHU8#n@nWGMWbhBL$$JlYstnj2;V-A$+#VvGy9YR@_ucAt^Dc}_| z9wyt-$ebhpIV`qewwvtV zcK`S}L_sXVjwW}Q_+x+E7Y9l)OIEM4JH^#St=X)aHh$bR&P!;cCVvBg*Lh5-z3oQ@ zT24idQV)CLrYdl8jGOM@AeiU^ST576r@7fS5PyN_u z4DqZ6l;ij}uxS^}7=N{-nD|8YGmyavm(J?IAcGIS*cYffC{kgy`X#b0o6BDKW!L?S z#Z)oD^1Bq@%H5Ho{{o@`7DedL){uZ3eS1-7o0^J!o74DM`ga(doc*LteSS!ln~!O+ zSo3>Mh%RTg@9yB($g@_*9@7;Xry*Te9$H!q4#|-7RTM>p5`U+pJdYl~mXx+}Mg=Yg z1h;?5uco6MruZ29kpWF@mKf?IOWX9iNG%n`;(`_w)W7V?d6PjzkG4B(Em5Z3FQSPV ziy-X{oMzCBXIWVKC=qsA2?+oFHJR|QMO&8srqfRv!NZtpwrP+>xv2d2wwT$dmZCiK zwnw4w!pd$9o_~3WZj1`>KlU9zr~wE_*_x0rrtX)IimNGp4u7?p-_iTksvN-<>=8tm z!`9GVIC_5H*37SZ!)ngUUE-UrdHTW($4+0~PDL=HZ5YTxSd zT$Jv2JAbz;Oij6uTw&fyaMx&<#pz-jHLR{(=oDi9Io%&;5^!nJ%L7v%CUq7`4y$f& zW2MnJ@T4r#LR2MlXO16r0&tp%_zZ0GSoP#{TNIo=M8V;%8|_F} z9>$BOeR?A+Ef;i#X!6Z;BtE$o_wV^0)|HKCy?;V2IwXXcAES(*C@VEC@^NHO?w{{S5Piw7z9ziTY#Dp-mC>>)K+ z|9=BujnbZc5eJMi4(1#it&8lo6Y}b|TAUlxe?B-+dC#rf>hLh_in``Fan~VM0s0a5 z%rG=dbAS9)5rP>>5z^9n*OOAqdkguV2hg}AsOr2U-`cIB7L9F_i=$p?><3k=JkmwO3&PFJSE`M=wDeaFl%J)Ml#eV$BM_`fYG~YJ6o_%Py z8ZQ6R`<^hJ{9tSzrA7S*zI8QOerFUhoBXKOr~&AVm6yxwm-0hcAfa>77U>{QuW0Q} zoC~!z-jwA0a(MTAyYWGrGb*|IIY*Pg$PmX)44($<5dTtlk*%a{*MWmSm{@x-d4G9) z1v-Eg0$s{9?tkOutp!cNG)J@IxDq#Sk1O#$UqQ;oO4E|iYb3ORd6_&ljXuki<-}l4 z*p`R>krj!NFYCL`gNMx_v77#p0iDYC26v$C9%DEYnJk#2%^QH%%hE>vj$AfkudHQ# zD6CZWQ<%KN;rwL zWqr}lBvLgd9NwE&Bl`F8SaW_MG!g-;aBYYH=6QdAj9@*P#3uzWm&~Ltq5DbLdcJ4i zOcZYGGyKTo3Dd}IVQ#BjQ#@laNnrk?rzz%fHPe`u7N_5*L&vO|D-)q&>wkyn6hUfh z8|ZE7e*hRM>%kP5b&E^G8#Tye^wRPSG{^W5dH8y_39Q9k(@y(|(R>Y4$VX+xAgfyqRR)2Z3g@rr9j z%`Rgja#<&s%N!4(m0#}6^Xp3TN>^wi#<4@xq?Y@=qTTtdyiQ*)5G$*nRX@c7I%r_n z^QDp1_-2;6RV>zSG%bE!n%w$gYX3do5en0pPm=*t>e(sX;hJ~!uz%3|xspGK&-EOT z)FM*9n#<~nqg$~319VXBRLTu4?iP94RNB!PtyxN7R_hNvU0Gem7^W z)=jl3(p_i`F%L)UwnhUchVuW_+Amc6^|K$h@HXM>SCK(#Y@7J z-5xubu~z4NBTcWxla9GVwzQo!5ag?xtiFKIw>iy3(kh!7UrgS9{ z55qW~Bi#0!XWM*7Ug@5CSjQlDb2m-M`ph0zH5A_ab6k~^l|oH z^lf{4UY8X5F@HQ?R?H{of@tgb5712!kVW>uSac&|(P{pRDur&(I(R?6Tz!V~?oF0i zU+$$eVZP&wfBRy32@@UidL7({=hBb-D}yL)q8v)wcN4O#W>MjQj)9qs^$|8x`^)!S zZd63j_0XOHat*LuM;l+)oRO<_!Ivsj9;H-aPXBy*CV$nrlm*W3c`zZ#J5SZW%2ZhC z^ML7ei-;8{)s)AS*XPgq?N%%v((8iMt^NYZ$ZFrEo-VY#U4q`m7kr7GhWjE2SCqi;++LnA`9~}beD5M{0xh<-)}RS zU`jhNTuj-4`M}IQ&O`*xcu57htv_0S6&j0r;OZi@d^di!P_sIvPvWN$*!vgXlkdV5 zD}YU&MP>?4Q$HoPS?)}O+Jbn7M#j^W4TAN5D1SFYhbMbPHgU(XuDw@BMNHS_wxjN5 zADxV3xuYLP%A}D+YvY{{qiYUIz2?po@eJv_Qk=OKJ;eGqPA86Hbjiv=6ylX8iI{*Q!XVUIu`zt6jQadY4qp-{$j#hL*EtNRz@ z+kX)o32H0EYGkl=aV2j0omPl@v{UPTeNqkuRSrk(Tw<@r4Q6Q`&8GM2_OT^<;!K(= zZ_9cb)XnD!{yE1MBKHxYZu<|K5*#(ki=<(tq!M7PD=_1<&%{O^EwF>#_hDt0pUjl>Kapt^6LM>$3v8d1u#}+- zuc$#{)LAz;EK1K}ZN(9->wTJf%gY2c=TJ)`QHNqq%B_WUGEfoFYEV@m!*Q2)R$^DiD!Shq%O>B zGG50&pwiNr{$L>u42tqY%`x*v?fHzqF&rL>e>HTCGbIUK**Nb|rj--d+Nz1o8q5>A z6Xv%@=7WY`+ps0RU@fknF8J#5LQ5>Iy}fJGh8L+sUhLtgc_c)hQ)aB^?0RDiHWal?Y3+hLmERrp?xN*Ey!`MXY5 zOP%gq%j>6nju_kYmAchZCV#i=Wu4Zjp3#HOn12B6`MFtC&a^4^WiE&2GN;Q3Tdkmm zKiF4N%)>tdP*vp1&)WHco5xb!>0h@_#mMBG)7M4S zGWar_h&uT*2tWp%)RDsdKo>gBxekWGo_HW>vTCc$&jd0Ge7=@O<=I8kAL`T*|28n1=ngG8%}`EPM7E_1?9@}7Go;_B$l3IGI&;&J$b4t z_qK@w<-gd*;Y{{8Gko$B>E_nc=N33t5!@rSZR++u>@{WenW`wfcw&QO6VSW~_a^|1 ziwB3|B3;e<@ar0WkuXcZDG_-*TwTvo#^sh)IHjBb4=z%34S#$qOruWEZV|S(KVlJe zcC!p=IjiD|y>%L(4aqM^Dk?UVMXsOdgnpBYqCMl-OM;lCO-b#yaHKV=m6$=@ELIa- z@m=}4i6&+ivo!mr@!_j3Q@S^}cPm_lVXl5Tl)(EL_$wec`wfzHEZ`>aW03bchBB*drr8PEqJ}`Ld~d=D6mVn%odo zEBm`&-?3OF&cZ=EBpzi><22h|(*O6Y22jVs=;mV{TYv?fC;T984hchLN}0`kGU!+f zDdV8?e7gAt2#esp|;R%1x$JZ$^O9xEjOWFQ<%=DcIA}~cEY}llRy1Dig{(>vpEzEBYB6COr!dt`8UlAZ_#eVWCc^99P-4OgMx&*ata)YU5gJxvw)s$E 
zoe!(Fa8&+zaw4}v+4f9Gv#D-vT8iJBHx?HetFtGGd(zM@R$0CgYLH`oP1jXpaTee_ ztPtHzR>E_S$)eEl*>u{Gakfw|`RhlT=iHf~wtt_p@~T`wTp_8rz~bs}SFnOk^joMj zJv~PQWW(6PDiulCLLyupkA0z9G&2525u+L&pJ0gyyGDYk+CZmKTgN!M7b@H$X3-j; zEPF?S{%HoQ{HN=+R}Nw6k5&~q&lHjlf{};!*IV)D9v6tYe!p-fvT=mEY!PcTX2V9a zZGQ{l6NLtruGEhR_8>#=<)pfY?tFQ-o#)cLDXFsJT-uK6t8*I+-_sGX;;Cp|LFEYu zs-=89O%tuHdc&hSfh_^U-gMJq8lLoSo;M^}p|!-2{bD_AHKDF#4$Lg%sFN=LJfUN* zEmM=5;0Jda!E?hUujgp@9laW5Xok^7aECYgP60(HtpA6lj8`YIt{sekFo0%%)fZN(W>SpH(2*OwOwK z7Y(uZWgFSv@280Ga(_pOX=SzM5|=qf6}RJQDos%8|AvTV44$5^))H!H2+sQcUXn&r z_p9%fxNcL%om+bofr~+O6tUhA_J6YMBQ~%dmRpXXVL|HE40Yb4OorQHdzWL#i|ISD z8|6EdQ>0q+LLGLv(5_!1yI8-?PneyR{2YejrQY3T>3a_QVN;EI9w0?+;GKvGzELW4 z`^Z z&1pWwoe+#~uSOliieHHsGl$+_@30?>Pf1V-Fdw30GQ=Ql5-Vgqob#CUn58ZhMS=UY{wd8sE?Uis>PGq~tbC3_I(R)ID z*XTZBNo@A_g|!0JaurI^%{^S>)T3T!BUDPh&Z07J`2$2lz9>i(R`>w3Z!sPoChpegSwU!d~;JmiRkvn?U z_xh_X6h7^3x>JNRDVGD{uf%-IcoFgXqHz~XgzDt#l|Id!b&B02C`-qR+hVI6oID#b zr#kWZERDv$&o~|D4>k|QDone=>l`K46YEricnS1`N{%?(ZKPz1R4ETa$#IMe9TYSe znj$zQZC+l28ocp|w94Odn%d$vCL62?5<21yC?!Vf3dZ#aIsxJ)d#K4@L~vt5oGnYq z$rSTPI^^L%f^q5XUuincQ%Jo<( zt%AGIB$*1e>yLqvxWOKz4#TUG`wtz?_M}+MVdinYtl1-p`FNZZ;oas!+e(4DTWuiR zA2uRj3%8YmD)^~3F~#_#ZJS7QyQ*5v;F3nc$0W^G2a*uSA0iy!{ctyl4+D9`y-RIOPLb?>nYKH2LGD&HgFA_(bB z_fH7^eGPb~!xIkSIi}lr(uQ(*`7k=sq1hbeF|@_*%BpYqdgFB_z{_X|Zk%7NO%Cvd zDwO9d_lw4%`{9A}`H8)?o+~Ij zvF+fffXH6Pd7_CGOLZXLFg}gIZ-@_Zn<6p;cSF~fsae*cwij*9<#t^`XoNna6g;~SMB9jW8PkL= zLz`oDn0;BB)Mx!x7ZlCzjqxyyZ-kbQZkk3IQuUwJ8VMQDARZRi*W}nvWeGMEq2i&s znAy;%ov}2N$<`T>x*C=U{K9c1tn3gVlPmO9G9;~Nuw9-lwn^f@On;i599d)ir1e*V zA#(IBCG23%I<|P$Tm#)Tui0_f*9zSUq`6e?BcS5-<4@NMsW*QXGTazN7N08UbS5)t>`NE( zKPx#B^wqMC8_P898T)Qqg6?wF+AJd89IfE8SMgLZR6?7V>d9s-8CdJ zwmj7%O(#TU@TVO+MEzoFC9Q8#vd&Vl-T|?mgsQS>5F1xNK2SVtqy+;XmG9d1U7@L2 zgXFRt&c!Den~~O5rkV3-5H&^GHKQ|dq>DkW^B;JWVLjKkKGTSZ(4|){`NHB)KEUr6 zRJz&!u%hD|y4&jyd8-)U%Y&+}rTTmXkuYY=xEeY;Ivaj!jj02jQq7n& zSO6!NC};*UfT02*EJ~*?;CJS$7R9XZ#(Qt6&tqyoKe=C<`W+r2 zg59pjMXB_UXKT;)-aT&n)g;iA!MEYDFRRQWm7RAW2TaqA^eSEEHu3iyTqgfLLuL=$RBPvT*W4`Vtc?|ziiQ;Ks za|h3-O3wP2ON+u^HAjd(!|*Ru4{If6$O^ZZ35U{RbbmfZ;@>$hG;U}Mq;QK57UJY; zp1V-f(ndD&4qFL|vUgA~*U+YOtskg@GQQzb!pE;8eklsaaSMhcY1svk`YUg^iqq2o z@~WZL0%#`Jf=~kK&;%jK%5vW^L+%zGEbB(tX40+d-xAU59pWPB0&aEvb`;WG_d91U zdaMX19W`OQnoG8f0*(EiKZ(~}5KvB~#L8S8BCQWVu6caDiNgNON-+yZR4pHqJY$qx zJv7{pCx=wYE(*UrV)UD)`F_$C7vK-TbI696rGOGOhGsd+P-a54+=uL$K05?&=ct^BFpsTq5Hlk>s51Q(wsVj9{j2a#kC$r(`K^!~FbkPx zpI+@fw#2Cf(-|O4xw=LF&eajDu4YJB)_$;=hDT7#7XOG%R zVN-qgRu!)Svq=TTf(rKFD*d`q0ca|_lA?zvgK|HEcdi6Z@Bc~`0VIuDZ$Rwzeof;eQXQMmuB2-L@Y*MnyeE~(@WH++CO zhBr3K3byoBB$e7=$=S6~cilDeO9n;OWhwH;=v@Df{8oqsPvVRw0#=q=aBO8PUfQSO zVK;z_k*{FpaO9eX*deYa25Xi*vKqvgQ!E+YeGbhh&`~_lm*!oyI+{&j^9TsPB=Vw( zOO&SPwCIKKhdaIjbvc&n+~nFyKK9pwZoazeVqR`>va^2LvZ1Jnt^|t@>7x_YV|k7V zXLJ|fhgj@9`~nW7vOr{FjAcRejaD1=)0_$zO|q YDF?Z!A(-m#B%t%%ajufml;`I^0HZou;Q#;t diff --git a/substrabac/fixtures/chunantes/datasamples/train/0024307/LABEL_0024307.csv b/substrabac/fixtures/chunantes/datasamples/train/0024307/LABEL_0024307.csv new file mode 100755 index 000000000..ff746af51 --- /dev/null +++ b/substrabac/fixtures/chunantes/datasamples/train/0024307/LABEL_0024307.csv @@ -0,0 +1 @@ +0.0,1.0,0.0,0.0,0.0,0.0,0.0 diff --git a/substrabac/populate.py b/substrabac/populate.py index 171173782..a345b038a 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -179,26 +179,17 @@ def update_datamanager(data_manager_key, data, profile): train_data_sample_keys = [] if data_manager_org1_key: - print(f'register train data on datamanager {org_1} (will take datamanager creator as worker)') - data = { - 
'paths': [ - os.path.join(dir_path, './fixtures/chunantes/datasamples/train/0024306.zip'), - ], - 'data_manager_keys': [data_manager_org1_key], - 'test_only': False, - } - train_data_sample_keys = create_asset(data, org_1, 'data_sample', True) - print(f'register train data (from server) on datamanager {org_1} (will take datamanager creator as worker)') - try: - shutil.copytree(os.path.join(dir_path, './fixtures/chunantes/datasamples/train/0024308'), - os.path.join(server_path, './fixtures/chunantes/datasamples/train/0024308')) - except FileExistsError: - pass + data_samples_path = ['./fixtures/chunantes/datasamples/train/0024306', + './fixtures/chunantes/datasamples/train/0024307'] + for d in data_samples_path: + try: + shutil.copytree(os.path.join(dir_path, d), + os.path.join(server_path, d)) + except FileExistsError: + pass data = { - 'paths': [ - os.path.join(server_path, './fixtures/chunantes/datasamples/train/0024308') - ], + 'paths': [os.path.join(server_path, d) for d in data_samples_path], 'data_manager_keys': [data_manager_org1_key], 'test_only': False, } From 53f387716016a77df2536f6ad15c227a70181356 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Wed, 24 Apr 2019 10:21:08 +0200 Subject: [PATCH 045/106] Algo does not expose dryrun anymore. --- substrabac/populate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index a345b038a..631e2159c 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -295,7 +295,7 @@ def update_datamanager(data_manager_key, data, profile): 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo3/description.md'), 'permissions': 'all', } - algo_key = create_asset(data, org_1, 'algo', True) + algo_key = create_asset(data, org_1, 'algo', False) #################################################### From 900415f2828ac4201b2ee3a5605fb990e4dafe98 Mon Sep 17 00:00:00 2001 From: GuillaumeCisco Date: Wed, 24 Apr 2019 16:20:12 +0200 Subject: [PATCH 046/106] Support dryrun on data paths --- substrabac/substrapp/views/datasample.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index 5b93b62a8..5488a4db7 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -59,6 +59,8 @@ def compute_dryrun(self, data, data_manager_keys): # Name of the dry-run subtuple (not important) pkhash = data[0]['pkhash'] subtuple_directory = build_subtuple_folders({'key': pkhash}) + data_path = os.path.join(subtuple_directory, 'data') + volumes = {} try: @@ -66,9 +68,14 @@ def compute_dryrun(self, data, data_manager_keys): # uncompress only for file if 'file' in data_sample: try: - uncompress_path(data_sample['file'], os.path.join(subtuple_directory, 'data', data_sample['pkhash'])) + uncompress_path(data_sample['file'], os.path.join(data_path, data_sample['pkhash'])) except Exception as e: raise e + # for all data paths, we need to create symbolic links inside data_path + # and add real path to volume bind docker + elif 'path' in data_sample: + os.symlink(data_sample['path'], os.path.join(data_path, data_sample['pkhash'])) + volumes.update({data_sample['path']: {'bind': data_sample['path'], 'mode': 'ro'}}) for datamanager_key in data_manager_keys: datamanager = DataManager.objects.get(pk=datamanager_key) @@ -80,11 +87,10 @@ def compute_dryrun(self, data, data_manager_keys): data_docker = 'data_dry_run' # tag must be lowercase for 
docker data_docker_name = f'{data_docker}_{pkhash}_{uuid.uuid4().hex}' - data_path = os.path.join(subtuple_directory, 'data') - # TODO bind paths - volumes = {data_path: {'bind': '/sandbox/data', 'mode': 'rw'}, - opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}} + volumes.update({data_path: {'bind': '/sandbox/data', 'mode': 'rw'}, + opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}}) + client.images.build(path=data_sample_docker_path, tag=data_docker, From 236faea589b0c48a0b3ce854abc2b44ad37eb53f Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 25 Apr 2019 12:12:45 +0200 Subject: [PATCH 047/106] simplify populate with latest sdk --- substrabac/populate.py | 549 +++++++++++++++++------------------------ 1 file changed, 232 insertions(+), 317 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index 631e2159c..e6f9b7d74 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -1,5 +1,4 @@ import argparse -import functools import os import json import shutil @@ -21,139 +20,47 @@ def setup_config(): client.create_config('chunantes', 'http://chunantes.substrabac:8001', '0.0') -def retry_until_success(f): - """Retry request to substrabac in case of Timeout.""" - @functools.wraps(f) - def wrapper(*args, **kwargs): - delay = 1 - backoff = 2 +def create_or_get(data, profile, asset, dryrun=False, many=False, + register=False): - while True: - try: - return f(*args, **kwargs) - except substra.exceptions.HTTPError as e: - print(colored(e, 'red')) - print(colored(e.response.content, 'red')) - print(f'Request error: retrying in {delay}s') - time.sleep(delay) - delay *= backoff - - return wrapper - - -def create_asset(data, profile, asset, dryrun=False): client.set_config(profile) + method = client.add if not register else client.register + if dryrun: print('dryrun') try: - r = client.add(asset, data, dryrun=True) - except substra.exceptions.HTTPError as e: - print(colored(e, 'red')) - else: - print(colored(json.dumps(r, indent=2), 'magenta')) - - print('real') - try: - r = client.add(asset, data) - except substra.exceptions.HTTPError as e: - if e.response.status_code == 408: - # retry until success in case of timeout - print(colored('got a 408, will test to get if from ledger', 'grey')) - r = e.response.json() - print(colored(json.dumps(r, indent=2), 'blue')) - results = r['pkhash'] if 'pkhash' in r else r['message'].get('pkhash') - - keys_to_check = results if isinstance(results, list) else [results] - for k in keys_to_check: - retry_until_success(client.get)(asset, k) - - return results - - elif e.response.status_code == 409: + r = method(asset, data, dryrun=True) + except substra.exceptions.AssetAlreadyExist as e: r = e.response.json() print(colored(json.dumps(r, indent=2), 'cyan')) - return [x['pkhash'] for x in r] if isinstance(r, list) else r['pkhash'] - - else: - print(colored(e, 'red')) - try: - error = e.response.json() - except Exception: - error = e.response - else: - print(colored(error, 'red')) - else: - print(colored(json.dumps(r, indent=2), 'green')) - return [x['pkhash'] for x in r] if isinstance(r, list) else r['pkhash'] - - -def register_asset(data, profile, asset, dryrun=False): - client.set_config(profile) - - if dryrun: - print('dryrun') - try: - r = client.register(asset, data, dryrun=True) - except substra.exceptions.HTTPError as e: - print(colored(e, 'red')) else: print(colored(json.dumps(r, indent=2), 'magenta')) print('real') try: - r = client.register(asset, data) - except substra.exceptions.HTTPError as e: - 
if e.response.status_code == 408: - # retry until success in case of timeout - print(colored('got a 408, will test to get if from ledger', 'grey')) - r = e.response.json() - print(colored(json.dumps(r, indent=2), 'blue')) - results = r['pkhash'] if 'pkhash' in r else r['message'].get('pkhash') + r = method(asset, data) - keys_to_check = results if isinstance(results, list) else [results] - for k in keys_to_check: - retry_until_success(client.get)(asset, k) - - return results - - elif e.response.status_code == 409: - r = e.response.json() - print(colored(json.dumps(r, indent=2), 'cyan')) - return [x['pkhash'] for x in r] if isinstance(r, list) else r['pkhash'] + except substra.exceptions.AssetAlreadyExist as e: + r = e.response.json() + print(colored(json.dumps(r, indent=2), 'cyan')) + key_or_keys = e.pkhash - else: - print(colored(e, 'red')) - try: - error = e.response.json() - except Exception: - error = e.response - else: - print(colored(error, 'red')) else: print(colored(json.dumps(r, indent=2), 'green')) - return [x['pkhash'] for x in r] if isinstance(r, list) else r['pkhash'] + key_or_keys = [x['pkhash'] for x in r] if many else r['pkhash'] + + return key_or_keys def update_datamanager(data_manager_key, data, profile): client.set_config(profile) - - try: - r = client.update('data_manager', data_manager_key, data) - except substra.exceptions.HTTPError as e: - if e.response.status_code != 408: - print(colored(e, 'red')) - return None - - # retry until success in case of timeout - r = retry_until_success(client.get)('data_manager', data_manager_key) - print(colored(json.dumps(r, indent=2), 'cyan')) - + r = client.update('data_manager', data_manager_key, data) print(colored(json.dumps(r, indent=2), 'green')) return r['pkhash'] -if __name__ == '__main__': +def do_populate(): setup_config() parser = argparse.ArgumentParser() @@ -173,27 +80,26 @@ def update_datamanager(data_manager_key, data, profile): 'description': os.path.join(dir_path, './fixtures/chunantes/datamanagers/datamanager0/description.md'), 'permissions': 'all', } - data_manager_org1_key = create_asset(data, org_1, 'data_manager', dryrun=True) + data_manager_org1_key = create_or_get(data, org_1, 'data_manager', dryrun=True) #################################################### train_data_sample_keys = [] - if data_manager_org1_key: - print(f'register train data (from server) on datamanager {org_1} (will take datamanager creator as worker)') - data_samples_path = ['./fixtures/chunantes/datasamples/train/0024306', - './fixtures/chunantes/datasamples/train/0024307'] - for d in data_samples_path: - try: - shutil.copytree(os.path.join(dir_path, d), - os.path.join(server_path, d)) - except FileExistsError: - pass - data = { - 'paths': [os.path.join(server_path, d) for d in data_samples_path], - 'data_manager_keys': [data_manager_org1_key], - 'test_only': False, - } - train_data_sample_keys = register_asset(data, org_1, 'data_sample', True) + print(f'register train data (from server) on datamanager {org_1} (will take datamanager creator as worker)') + data_samples_path = ['./fixtures/chunantes/datasamples/train/0024306', + './fixtures/chunantes/datasamples/train/0024307'] + for d in data_samples_path: + try: + shutil.copytree(os.path.join(dir_path, d), + os.path.join(server_path, d)) + except FileExistsError: + pass + data = { + 'paths': [os.path.join(server_path, d) for d in data_samples_path], + 'data_manager_keys': [data_manager_org1_key], + 'test_only': False, + } + train_data_sample_keys = create_or_get(data, org_1, 'data_sample', 
dryrun=True, many=True, register=True) #################################################### @@ -205,197 +111,206 @@ def update_datamanager(data_manager_key, data, profile): 'description': os.path.join(dir_path, './fixtures/owkin/datamanagers/datamanager0/description.md'), 'permissions': 'all' } - data_manager_org0_key = create_asset(data, org_0, 'data_manager') + data_manager_org0_key = create_or_get(data, org_0, 'data_manager') + + #################################################### + + print('register test data') + data = { + 'paths': [ + os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024900.zip'), + os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024901.zip') + ], + 'data_manager_keys': [data_manager_org0_key], + 'test_only': True, + } + test_data_sample_keys = create_or_get(data, org_0, 'data_sample', many=True) + + #################################################### + + print('register test data 2') + data = { + 'paths': [ + os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024902.zip'), + os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024903.zip') + ], + 'data_manager_keys': [data_manager_org0_key], + 'test_only': True, + } + create_or_get(data, org_0, 'data_sample', many=True) + + #################################################### + + print('register test data 3') + data = { + 'paths': [ + os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024904.zip'), + os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024905.zip') + ], + 'data_manager_keys': [data_manager_org0_key], + 'test_only': True, + } + create_or_get(data, org_0, 'data_sample', many=True) + + #################################################### + + print('register objective') + data = { + 'name': 'Skin Lesion Classification Objective', + 'description': os.path.join(dir_path, './fixtures/chunantes/objectives/objective0/description.md'), + 'metrics_name': 'macro-average recall', + 'metrics': os.path.join(dir_path, './fixtures/chunantes/objectives/objective0/metrics.py'), + 'permissions': 'all', + 'test_data_sample_keys': test_data_sample_keys, + 'test_data_manager_key': data_manager_org0_key + } + + objective_key = create_or_get(data, org_0, 'objective', dryrun=True) + + #################################################### + + print('register objective without data manager and data sample') + data = { + 'name': 'Skin Lesion Classification Objective', + 'description': os.path.join(dir_path, './fixtures/owkin/objectives/objective0/description.md'), + 'metrics_name': 'macro-average recall', + 'metrics': os.path.join(dir_path, './fixtures/owkin/objectives/objective0/metrics.py'), + 'permissions': 'all' + } + + create_or_get(data, org_0, 'objective', dryrun=True) + + #################################################### + + # update datamanager + print('update datamanager') + data = { + 'objective_key': objective_key + } + update_datamanager(data_manager_org1_key, data, org_0) + + #################################################### + + # register algo + print('register algo') + data = { + 'name': 'Logistic regression', + 'file': os.path.join(dir_path, './fixtures/chunantes/algos/algo3/algo.tar.gz'), + 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo3/description.md'), + 'permissions': 'all', + } + algo_key = create_or_get(data, org_1, 'algo') + + #################################################### + + print('register algo 2') + data = { + 'name': 'Neural Network', + 'file': os.path.join(dir_path, './fixtures/chunantes/algos/algo0/algo.tar.gz'), 
+ 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo0/description.md'), + 'permissions': 'all', + } + algo_key_2 = create_or_get(data, org_1, 'algo') + + #################################################### + + data = { + 'name': 'Random Forest', + 'file': os.path.join(dir_path, './fixtures/chunantes/algos/algo4/algo.tar.gz'), + 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo4/description.md'), + 'permissions': 'all', + } + algo_key_3 = create_or_get(data, org_1, 'algo') #################################################### - if data_manager_org0_key and data_manager_org1_key: - print('register test data') - data = { - 'paths': [ - os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024900.zip'), - os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024901.zip') - ], - 'data_manager_keys': [data_manager_org0_key], - 'test_only': True, - } - test_data_sample_keys = create_asset(data, org_0, 'data_sample', False) - - #################################################### - - print('register test data 2') - data = { - 'paths': [ - os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024902.zip'), - os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024903.zip') - ], - 'data_manager_keys': [data_manager_org0_key], - 'test_only': True, - } - test_data_sample_keys_2 = create_asset(data, org_0, 'data_sample', False) - - #################################################### - - print('register test data 3') - data = { - 'paths': [ - os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024904.zip'), - os.path.join(dir_path, './fixtures/owkin/datasamples/test/0024905.zip') - ], - 'data_manager_keys': [data_manager_org0_key], - 'test_only': True, - } - test_data_sample_keys_3 = create_asset(data, org_0, 'data_sample', False) - - #################################################### - - print('register objective') - data = { - 'name': 'Skin Lesion Classification Objective', - 'description': os.path.join(dir_path, './fixtures/chunantes/objectives/objective0/description.md'), - 'metrics_name': 'macro-average recall', - 'metrics': os.path.join(dir_path, './fixtures/chunantes/objectives/objective0/metrics.py'), - 'permissions': 'all', - 'test_data_sample_keys': test_data_sample_keys, - 'test_data_manager_key': data_manager_org0_key - } - - objective_key = create_asset(data, org_0, 'objective', True) - - #################################################### - - print('register objective without data manager and data sample') - data = { - 'name': 'Skin Lesion Classification Objective', - 'description': os.path.join(dir_path, './fixtures/owkin/objectives/objective0/description.md'), - 'metrics_name': 'macro-average recall', - 'metrics': os.path.join(dir_path, './fixtures/owkin/objectives/objective0/metrics.py'), - 'permissions': 'all' - } - - objective_key_test = create_asset(data, org_0, 'objective', True) - - #################################################### - - # update datamanager - print('update datamanager') - data = { - 'objective_key': objective_key - } - update_datamanager(data_manager_org1_key, data, org_0) - - #################################################### - - if objective_key: - # register algo - print('register algo') - data = { - 'name': 'Logistic regression', - 'file': os.path.join(dir_path, './fixtures/chunantes/algos/algo3/algo.tar.gz'), - 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo3/description.md'), - 'permissions': 'all', - } - algo_key = create_asset(data, org_1, 'algo', False) - - 
#################################################### - - print('register algo 2') - data = { - 'name': 'Neural Network', - 'file': os.path.join(dir_path, './fixtures/chunantes/algos/algo0/algo.tar.gz'), - 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo0/description.md'), - 'permissions': 'all', - } - algo_key_2 = create_asset(data, org_1, 'algo', False) - - #################################################### - - data = { - 'name': 'Random Forest', - 'file': os.path.join(dir_path, './fixtures/chunantes/algos/algo4/algo.tar.gz'), - 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo4/description.md'), - 'permissions': 'all', - } - algo_key_3 = create_asset(data, org_1, 'algo', False) - - #################################################### - - if algo_key and train_data_sample_keys: - # create traintuple - print('create traintuple') - data = { - 'algo_key': algo_key, - 'objective_key': objective_key, - 'data_manager_key': data_manager_org1_key, - 'train_data_sample_keys': train_data_sample_keys, - 'tag': 'substra' - } - traintuple_key = create_asset(data, org_1, 'traintuple') - - print('create second traintuple') - data = { - 'algo_key': algo_key_2, - 'data_manager_key': data_manager_org1_key, - 'objective_key': objective_key, - 'train_data_sample_keys': train_data_sample_keys, - 'tag': 'My super tag' - } - - traintuple_key_2 = create_asset(data, org_1, 'traintuple') - - print('create third traintuple') - data = { - 'algo_key': algo_key_3, - 'data_manager_key': data_manager_org1_key, - 'objective_key': objective_key, - 'train_data_sample_keys': train_data_sample_keys, - } - - traintuple_key_3 = create_asset(data, org_1, 'traintuple') - - #################################################### - - if traintuple_key: - client.set_config(org_1) - res = client.get('traintuple', traintuple_key) - print(colored(json.dumps(res, indent=2), 'green')) - - # create testtuple - print('create testtuple') - data = { - 'traintuple_key': traintuple_key - } - - testtuple_key = create_asset(data, org_1, 'testtuple') - # testtuple_key = None - - if testtuple_key: - client.set_config(org_1) - res_t = client.get('testtuple', testtuple_key) - print(colored(json.dumps(res_t, indent=2), 'yellow')) - - while res['status'] not in ('done', 'failed') or res_t['status'] not in ('done', 'failed'): - print('-' * 100) - try: - client.set_config(org_1) - res = client.get('traintuple', traintuple_key) - print(colored(json.dumps(res, indent=2), 'green')) - - res_t = client.get('testtuple', testtuple_key) - print(colored(json.dumps(res_t, indent=2), 'yellow')) - except substra.exceptions.SDKException: - print(colored('Error when getting subtuples', 'red')) - time.sleep(3) - - else: - while res['status'] not in ('done', 'failed'): - print('-' * 100) - try: - client.set_config(org_1) - res = client.get('traintuple', traintuple_key) - print(colored(json.dumps(res, indent=2), 'green')) - except substra.exceptions.SDKException: - print(colored('Error when getting subtuple', 'red')) - time.sleep(3) - - print('Testtuple create failed') + # create traintuple + print('create traintuple') + data = { + 'algo_key': algo_key, + 'objective_key': objective_key, + 'data_manager_key': data_manager_org1_key, + 'train_data_sample_keys': train_data_sample_keys, + 'tag': 'substra' + } + traintuple_key = create_or_get(data, org_1, 'traintuple') + + print('create second traintuple') + data = { + 'algo_key': algo_key_2, + 'data_manager_key': data_manager_org1_key, + 'objective_key': objective_key, + 
'train_data_sample_keys': train_data_sample_keys, + 'tag': 'My super tag' + } + + create_or_get(data, org_1, 'traintuple') + + print('create third traintuple') + data = { + 'algo_key': algo_key_3, + 'data_manager_key': data_manager_org1_key, + 'objective_key': objective_key, + 'train_data_sample_keys': train_data_sample_keys, + } + + create_or_get(data, org_1, 'traintuple') + + #################################################### + + client.set_config(org_1) + res = client.get('traintuple', traintuple_key) + print(colored(json.dumps(res, indent=2), 'green')) + + # create testtuple + print('create testtuple') + data = { + 'traintuple_key': traintuple_key + } + + testtuple_key = create_or_get(data, org_1, 'testtuple') + + if testtuple_key: + client.set_config(org_1) + res_t = client.get('testtuple', testtuple_key) + print(colored(json.dumps(res_t, indent=2), 'yellow')) + + while res['status'] not in ('done', 'failed') or res_t['status'] not in ('done', 'failed'): + print('-' * 100) + try: + client.set_config(org_1) + res = client.get('traintuple', traintuple_key) + print(colored(json.dumps(res, indent=2), 'green')) + + res_t = client.get('testtuple', testtuple_key) + print(colored(json.dumps(res_t, indent=2), 'yellow')) + except substra.exceptions.SDKException: + print(colored('Error when getting subtuples', 'red')) + time.sleep(3) + + else: + while res['status'] not in ('done', 'failed'): + print('-' * 100) + try: + client.set_config(org_1) + res = client.get('traintuple', traintuple_key) + print(colored(json.dumps(res, indent=2), 'green')) + except substra.exceptions.SDKException: + print(colored('Error when getting subtuple', 'red')) + time.sleep(3) + + print('Testtuple create failed') + + +if __name__ == '__main__': + try: + do_populate() + except substra.exceptions.HTTPError as e: + try: + error = e.response.json() + except Exception: + error_message = e.response.text + else: + error_message = json.dumps(error, indent=2) + print(colored(str(e), 'red')) + print(colored(error_message, 'red')) From 1c43f4728dd27c9c02f2b06fba8c100562f393d2 Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 25 Apr 2019 13:31:28 +0200 Subject: [PATCH 048/106] fix update manager --- substrabac/populate.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index e6f9b7d74..fe29908ec 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -55,9 +55,22 @@ def create_or_get(data, profile, asset, dryrun=False, many=False, def update_datamanager(data_manager_key, data, profile): client.set_config(profile) - r = client.update('data_manager', data_manager_key, data) - print(colored(json.dumps(r, indent=2), 'green')) - return r['pkhash'] + try: + r = client.update('data_manager', data_manager_key, data) + + except substra.exceptions.AssetAlreadyExist as e: + r = e.response.json() + print(colored(json.dumps(r, indent=2), 'cyan')) + + except substra.exceptions.InvalidRequest as e: + # FIXME if the data manager is already associated with the objective + # backend answer with a 400 and a raw error coming from the + # ledger. + # this case will be handled soon, with the fabric SDK. 
+ print(colored(str(e), 'red')) + + else: + print(colored(json.dumps(r, indent=2), 'green')) def do_populate(): From e4996dfa626ff4cee156a279d6a056275eed4651 Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 25 Apr 2019 13:48:09 +0200 Subject: [PATCH 049/106] rename SDK exceptions --- substrabac/populate.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index fe29908ec..821eb6134 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -31,7 +31,7 @@ def create_or_get(data, profile, asset, dryrun=False, many=False, print('dryrun') try: r = method(asset, data, dryrun=True) - except substra.exceptions.AssetAlreadyExist as e: + except substra.exceptions.AlreadyExists as e: r = e.response.json() print(colored(json.dumps(r, indent=2), 'cyan')) else: @@ -41,7 +41,7 @@ def create_or_get(data, profile, asset, dryrun=False, many=False, try: r = method(asset, data) - except substra.exceptions.AssetAlreadyExist as e: + except substra.exceptions.AlreadyExists as e: r = e.response.json() print(colored(json.dumps(r, indent=2), 'cyan')) key_or_keys = e.pkhash @@ -58,7 +58,7 @@ def update_datamanager(data_manager_key, data, profile): try: r = client.update('data_manager', data_manager_key, data) - except substra.exceptions.AssetAlreadyExist as e: + except substra.exceptions.AlreadyExists as e: r = e.response.json() print(colored(json.dumps(r, indent=2), 'cyan')) From c9294d390c46888a3c6225baf5c7143fdf42971a Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Fri, 26 Apr 2019 16:28:22 +0200 Subject: [PATCH 050/106] Simplify check train and test tuple --- substrabac/populate.py | 45 ++++++++++++++++++------------------------ 1 file changed, 19 insertions(+), 26 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index 821eb6134..f1eecd868 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -283,36 +283,29 @@ def do_populate(): testtuple_key = create_or_get(data, org_1, 'testtuple') - if testtuple_key: - client.set_config(org_1) - res_t = client.get('testtuple', testtuple_key) - print(colored(json.dumps(res_t, indent=2), 'yellow')) + client.set_config(org_1) + res_t = client.get('testtuple', testtuple_key) + print(colored(json.dumps(res_t, indent=2), 'yellow')) - while res['status'] not in ('done', 'failed') or res_t['status'] not in ('done', 'failed'): - print('-' * 100) - try: - client.set_config(org_1) - res = client.get('traintuple', traintuple_key) - print(colored(json.dumps(res, indent=2), 'green')) + testtuple_status = None + traintuple_status = None - res_t = client.get('testtuple', testtuple_key) - print(colored(json.dumps(res_t, indent=2), 'yellow')) - except substra.exceptions.SDKException: - print(colored('Error when getting subtuples', 'red')) - time.sleep(3) + client.set_config(org_1) - else: - while res['status'] not in ('done', 'failed'): + while traintuple_status not in ('done', 'failed') or testtuple_status not in ('done', 'failed'): + res = client.get('traintuple', traintuple_key) + res_t = client.get('testtuple', testtuple_key) + if traintuple_status != res['status'] or testtuple_status != res_t['status']: + traintuple_status = res['status'] + testtuple_status = res_t['status'] + print('') print('-' * 100) - try: - client.set_config(org_1) - res = client.get('traintuple', traintuple_key) - print(colored(json.dumps(res, indent=2), 'green')) - except substra.exceptions.SDKException: - print(colored('Error when getting subtuple', 'red')) - time.sleep(3) - - print('Testtuple 
create failed') + print(colored(json.dumps(res, indent=2), 'green')) + print(colored(json.dumps(res_t, indent=2), 'yellow')) + else: + print('.', end='', flush=True) + + time.sleep(3) if __name__ == '__main__': From fd43fcbc244b3555b72f919493c09adda64cbb11 Mon Sep 17 00:00:00 2001 From: Samuel Date: Mon, 29 Apr 2019 11:03:22 +0200 Subject: [PATCH 051/106] rename create_or_get to get_or_create --- substrabac/populate.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index f1eecd868..51637bfe0 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -20,7 +20,7 @@ def setup_config(): client.create_config('chunantes', 'http://chunantes.substrabac:8001', '0.0') -def create_or_get(data, profile, asset, dryrun=False, many=False, +def get_or_create(data, profile, asset, dryrun=False, many=False, register=False): client.set_config(profile) @@ -93,7 +93,7 @@ def do_populate(): 'description': os.path.join(dir_path, './fixtures/chunantes/datamanagers/datamanager0/description.md'), 'permissions': 'all', } - data_manager_org1_key = create_or_get(data, org_1, 'data_manager', dryrun=True) + data_manager_org1_key = get_or_create(data, org_1, 'data_manager', dryrun=True) #################################################### @@ -112,7 +112,7 @@ def do_populate(): 'data_manager_keys': [data_manager_org1_key], 'test_only': False, } - train_data_sample_keys = create_or_get(data, org_1, 'data_sample', dryrun=True, many=True, register=True) + train_data_sample_keys = get_or_create(data, org_1, 'data_sample', dryrun=True, many=True, register=True) #################################################### @@ -124,7 +124,7 @@ def do_populate(): 'description': os.path.join(dir_path, './fixtures/owkin/datamanagers/datamanager0/description.md'), 'permissions': 'all' } - data_manager_org0_key = create_or_get(data, org_0, 'data_manager') + data_manager_org0_key = get_or_create(data, org_0, 'data_manager') #################################################### @@ -137,7 +137,7 @@ def do_populate(): 'data_manager_keys': [data_manager_org0_key], 'test_only': True, } - test_data_sample_keys = create_or_get(data, org_0, 'data_sample', many=True) + test_data_sample_keys = get_or_create(data, org_0, 'data_sample', many=True) #################################################### @@ -150,7 +150,7 @@ def do_populate(): 'data_manager_keys': [data_manager_org0_key], 'test_only': True, } - create_or_get(data, org_0, 'data_sample', many=True) + get_or_create(data, org_0, 'data_sample', many=True) #################################################### @@ -163,7 +163,7 @@ def do_populate(): 'data_manager_keys': [data_manager_org0_key], 'test_only': True, } - create_or_get(data, org_0, 'data_sample', many=True) + get_or_create(data, org_0, 'data_sample', many=True) #################################################### @@ -178,7 +178,7 @@ def do_populate(): 'test_data_manager_key': data_manager_org0_key } - objective_key = create_or_get(data, org_0, 'objective', dryrun=True) + objective_key = get_or_create(data, org_0, 'objective', dryrun=True) #################################################### @@ -191,7 +191,7 @@ def do_populate(): 'permissions': 'all' } - create_or_get(data, org_0, 'objective', dryrun=True) + get_or_create(data, org_0, 'objective', dryrun=True) #################################################### @@ -212,7 +212,7 @@ def do_populate(): 'description': os.path.join(dir_path, 
'./fixtures/chunantes/algos/algo3/description.md'), 'permissions': 'all', } - algo_key = create_or_get(data, org_1, 'algo') + algo_key = get_or_create(data, org_1, 'algo') #################################################### @@ -223,7 +223,7 @@ def do_populate(): 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo0/description.md'), 'permissions': 'all', } - algo_key_2 = create_or_get(data, org_1, 'algo') + algo_key_2 = get_or_create(data, org_1, 'algo') #################################################### @@ -233,7 +233,7 @@ def do_populate(): 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo4/description.md'), 'permissions': 'all', } - algo_key_3 = create_or_get(data, org_1, 'algo') + algo_key_3 = get_or_create(data, org_1, 'algo') #################################################### @@ -246,7 +246,7 @@ def do_populate(): 'train_data_sample_keys': train_data_sample_keys, 'tag': 'substra' } - traintuple_key = create_or_get(data, org_1, 'traintuple') + traintuple_key = get_or_create(data, org_1, 'traintuple') print('create second traintuple') data = { @@ -257,7 +257,7 @@ def do_populate(): 'tag': 'My super tag' } - create_or_get(data, org_1, 'traintuple') + get_or_create(data, org_1, 'traintuple') print('create third traintuple') data = { @@ -267,7 +267,7 @@ def do_populate(): 'train_data_sample_keys': train_data_sample_keys, } - create_or_get(data, org_1, 'traintuple') + get_or_create(data, org_1, 'traintuple') #################################################### @@ -281,7 +281,7 @@ def do_populate(): 'traintuple_key': traintuple_key } - testtuple_key = create_or_get(data, org_1, 'testtuple') + testtuple_key = get_or_create(data, org_1, 'testtuple') client.set_config(org_1) res_t = client.get('testtuple', testtuple_key) From 36c2eceb0737f6ab2754ba32ee54c951760557c5 Mon Sep 17 00:00:00 2001 From: Samuel Date: Mon, 29 Apr 2019 11:06:36 +0200 Subject: [PATCH 052/106] populate remove many parameter --- substrabac/populate.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index 51637bfe0..7f1f18cb5 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -20,8 +20,7 @@ def setup_config(): client.create_config('chunantes', 'http://chunantes.substrabac:8001', '0.0') -def get_or_create(data, profile, asset, dryrun=False, many=False, - register=False): +def get_or_create(data, profile, asset, dryrun=False, register=False): client.set_config(profile) @@ -48,7 +47,7 @@ def get_or_create(data, profile, asset, dryrun=False, many=False, else: print(colored(json.dumps(r, indent=2), 'green')) - key_or_keys = [x['pkhash'] for x in r] if many else r['pkhash'] + key_or_keys = [x['pkhash'] for x in r] if isinstance(r, list) else r['pkhash'] return key_or_keys @@ -112,7 +111,7 @@ def do_populate(): 'data_manager_keys': [data_manager_org1_key], 'test_only': False, } - train_data_sample_keys = get_or_create(data, org_1, 'data_sample', dryrun=True, many=True, register=True) + train_data_sample_keys = get_or_create(data, org_1, 'data_sample', dryrun=True, register=True) #################################################### @@ -137,7 +136,7 @@ def do_populate(): 'data_manager_keys': [data_manager_org0_key], 'test_only': True, } - test_data_sample_keys = get_or_create(data, org_0, 'data_sample', many=True) + test_data_sample_keys = get_or_create(data, org_0, 'data_sample') #################################################### @@ -150,7 +149,7 @@ def do_populate(): 'data_manager_keys': 
[data_manager_org0_key], 'test_only': True, } - get_or_create(data, org_0, 'data_sample', many=True) + get_or_create(data, org_0, 'data_sample') #################################################### @@ -163,7 +162,7 @@ def do_populate(): 'data_manager_keys': [data_manager_org0_key], 'test_only': True, } - get_or_create(data, org_0, 'data_sample', many=True) + get_or_create(data, org_0, 'data_sample') #################################################### From 1243f382eb1f43fc1cdd38b4d40abcb90ad249db Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 29 Apr 2019 18:09:13 +0200 Subject: [PATCH 053/106] Clean celery config with broker. --- docker/celerybeat/Dockerfile | 3 -- docker/celeryworker/Dockerfile | 2 - docker/start.py | 47 ++++++++++++++++++------ docker/substrabac/Dockerfile | 2 - substrabac/substrabac/celery.py | 2 +- substrabac/substrabac/settings/common.py | 2 + substrabac/substrabac/settings/dev.py | 3 ++ 7 files changed, 41 insertions(+), 20 deletions(-) diff --git a/docker/celerybeat/Dockerfile b/docker/celerybeat/Dockerfile index 023732885..144cdad4f 100644 --- a/docker/celerybeat/Dockerfile +++ b/docker/celerybeat/Dockerfile @@ -14,6 +14,3 @@ RUN pip3 install -r requirements.txt COPY ./substrabac/libs /usr/src/app/libs COPY ./substrabac/substrapp /usr/src/app/substrapp COPY ./substrabac/substrabac /usr/src/app/substrabac - -RUN sed -i 's/localhost/rabbit/g' /usr/src/app/substrabac/celery.py - diff --git a/docker/celeryworker/Dockerfile b/docker/celeryworker/Dockerfile index 0322d7966..6bf39448d 100644 --- a/docker/celeryworker/Dockerfile +++ b/docker/celeryworker/Dockerfile @@ -20,5 +20,3 @@ COPY ./substrabac/fake_metrics /usr/src/app/fake_metrics COPY ./substrabac/fake_data_sample /usr/src/app/fake_data_sample COPY ./substrabac/substrapp /usr/src/app/substrapp COPY ./substrabac/substrabac /usr/src/app/substrabac - -RUN sed -i 's/localhost/rabbit/g' /usr/src/app/substrabac/celery.py diff --git a/docker/start.py b/docker/start.py index 33dad9702..61f7cf18a 100644 --- a/docker/start.py +++ b/docker/start.py @@ -14,6 +14,24 @@ def generate_docker_compose_file(conf, launch_settings): + + # POSTGRES + POSTGRES_USER = 'substrabac' + USER = 'substrabac' + POSTGRES_PASSWORD = 'substrabac' + POSTGRES_DB = 'substrabac' + + # RABBITMQ + RABBITMQ_DEFAULT_USER = 'guest' + RABBITMQ_DEFAULT_PASS = 'guest' + RABBITMQ_HOSTNAME = 'rabbitmq' + RABBITMQ_NODENAME = 'rabbitmq' + RABBITMQ_DOMAIN = 'rabbit' + RABBITMQ_PORT = '5672' + + # CELERY + CELERY_BROKER_URL = f'amqp://{RABBITMQ_DEFAULT_USER}:{RABBITMQ_DEFAULT_PASS}@{RABBITMQ_DOMAIN}:{RABBITMQ_PORT}//' + try: from ruamel import yaml except ImportError: @@ -24,10 +42,10 @@ def generate_docker_compose_file(conf, launch_settings): 'substrabac_tools': {'postgresql': {'container_name': 'postgresql', 'image': 'library/postgres:10.5', 'restart': 'unless-stopped', - 'environment': ['POSTGRES_USER=substrabac', - 'USER=substrabac', - 'POSTGRES_PASSWORD=substrabac', - 'POSTGRES_DB=substrabac'], + 'environment': [f'POSTGRES_USER={POSTGRES_USER}', + f'USER={USER}', + f'POSTGRES_PASSWORD={POSTGRES_PASSWORD}', + f'POSTGRES_DB={POSTGRES_DB}'], 'volumes': [ '/substra/backup/postgres-data:/var/lib/postgresql/data', f'{dir_path}/postgresql/init.sh:/docker-entrypoint-initdb.d/init.sh'], @@ -36,9 +54,10 @@ def generate_docker_compose_file(conf, launch_settings): 'hostname': 'celerybeat', 'image': 'substra/celerybeat', 'restart': 'unless-stopped', - 'command': '/bin/bash -c "while ! { nc -z rabbit 5672 2>&1; }; do sleep 1; done; while ! 
{ nc -z postgresql 5432 2>&1; }; do sleep 1; done; celery -A substrabac beat -l info -b rabbit"', + 'command': '/bin/bash -c "while ! { nc -z rabbit 5672 2>&1; }; do sleep 1; done; while ! { nc -z postgresql 5432 2>&1; }; do sleep 1; done; celery -A substrabac beat -l info"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, 'environment': ['PYTHONUNBUFFERED=1', + f'CELERY_BROKER_URL={CELERY_BROKER_URL}', f'DJANGO_SETTINGS_MODULE=substrabac.settings.common'], 'depends_on': ['postgresql', 'rabbit'] }, @@ -46,10 +65,10 @@ def generate_docker_compose_file(conf, launch_settings): 'hostname': 'rabbitmq', # Must be set to be able to recover from volume 'restart': 'unless-stopped', 'image': 'rabbitmq:3', - 'environment': ['RABBITMQ_DEFAULT_USER=guest', - 'RABBITMQ_DEFAULT_PASS=guest', - 'HOSTNAME=rabbitmq', - 'RABBITMQ_NODENAME=rabbitmq'], + 'environment': [f'RABBITMQ_DEFAULT_USER={RABBITMQ_DEFAULT_USER}', + f'RABBITMQ_DEFAULT_PASS={RABBITMQ_DEFAULT_PASS}', + f'HOSTNAME={RABBITMQ_HOSTNAME}', + f'RABBITMQ_NODENAME={RABBITMQ_NODENAME}'], 'volumes': ['/substra/backup/rabbit-data:/var/lib/rabbitmq'] }, }, @@ -73,6 +92,7 @@ def generate_docker_compose_file(conf, launch_settings): 'command': f'/bin/bash -c "while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; yes | python manage.py migrate --settings=substrabac.settings.{launch_settings}; python3 manage.py collectstatic --noinput; python3 manage.py runserver 0.0.0.0:{port}"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, 'environment': ['DATABASE_HOST=postgresql', + f'CELERY_BROKER_URL={CELERY_BROKER_URL}', f'SUBSTRABAC_ORG={org_name}', f'SUBSTRABAC_DEFAULT_PORT={port}', f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', @@ -100,11 +120,12 @@ def generate_docker_compose_file(conf, launch_settings): 'hostname': f'{org_name}.scheduler', 'image': 'substra/celeryworker', 'restart': 'unless-stopped', - 'command': f'/bin/bash -c "while ! {{ nc -z rabbit 5672 2>&1; }}; do sleep 1; done; while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; celery -A substrabac worker -l info -n {org_name_stripped} -Q {org_name},scheduler,celery -b rabbit --hostname {org_name}.scheduler"', + 'command': f'/bin/bash -c "while ! {{ nc -z rabbit 5672 2>&1; }}; do sleep 1; done; while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; celery -A substrabac worker -l info -n {org_name_stripped} -Q {org_name},scheduler,celery --hostname {org_name}.scheduler"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, 'environment': [f'ORG={org_name_stripped}', f'SUBSTRABAC_ORG={org_name}', f'SUBSTRABAC_DEFAULT_PORT={port}', + f'CELERY_BROKER_URL={CELERY_BROKER_URL}', f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', 'PYTHONUNBUFFERED=1', f"BACK_AUTH_USER={os.environ.get('BACK_AUTH_USER', '')}", @@ -127,11 +148,12 @@ def generate_docker_compose_file(conf, launch_settings): 'hostname': f'{org_name}.worker', 'image': 'substra/celeryworker', 'restart': 'unless-stopped', - 'command': f'/bin/bash -c "while ! {{ nc -z rabbit 5672 2>&1; }}; do sleep 1; done; while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; celery -A substrabac worker -l info -n {org_name_stripped} -Q {org_name},{org_name}.worker,celery -b rabbit --hostname {org_name}.worker"', + 'command': f'/bin/bash -c "while ! {{ nc -z rabbit 5672 2>&1; }}; do sleep 1; done; while ! 
{{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; celery -A substrabac worker -l info -n {org_name_stripped} -Q {org_name},{org_name}.worker,celery --hostname {org_name}.worker"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, 'environment': [f'ORG={org_name_stripped}', f'SUBSTRABAC_ORG={org_name}', f'SUBSTRABAC_DEFAULT_PORT={port}', + f'CELERY_BROKER_URL={CELERY_BROKER_URL}', f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', 'PYTHONUNBUFFERED=1', f"BACK_AUTH_USER={os.environ.get('BACK_AUTH_USER', '')}", @@ -157,11 +179,12 @@ def generate_docker_compose_file(conf, launch_settings): 'hostname': f'{org_name}.dryrunner', 'image': 'substra/celeryworker', 'restart': 'unless-stopped', - 'command': f'/bin/bash -c "while ! {{ nc -z rabbit 5672 2>&1; }}; do sleep 1; done; while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; celery -A substrabac worker -l info -n {org_name_stripped} -Q {org_name},{org_name}.dryrunner,celery -b rabbit --hostname {org_name}.dryrunner"', + 'command': f'/bin/bash -c "while ! {{ nc -z rabbit 5672 2>&1; }}; do sleep 1; done; while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; celery -A substrabac worker -l info -n {org_name_stripped} -Q {org_name},{org_name}.dryrunner,celery --hostname {org_name}.dryrunner"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, 'environment': [f'ORG={org_name_stripped}', f'SUBSTRABAC_ORG={org_name}', f'SUBSTRABAC_DEFAULT_PORT={port}', + f'CELERY_BROKER_URL={CELERY_BROKER_URL}', f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', 'PYTHONUNBUFFERED=1', f"BACK_AUTH_USER={os.environ.get('BACK_AUTH_USER', '')}", diff --git a/docker/substrabac/Dockerfile b/docker/substrabac/Dockerfile index 4d0ae185c..053c45257 100644 --- a/docker/substrabac/Dockerfile +++ b/docker/substrabac/Dockerfile @@ -18,5 +18,3 @@ COPY ./substrabac/manage.py /usr/src/app/manage.py COPY ./substrabac/libs /usr/src/app/libs COPY ./substrabac/substrapp /usr/src/app/substrapp COPY ./substrabac/substrabac /usr/src/app/substrabac - -RUN sed -i 's/localhost/rabbit/g' /usr/src/app/substrabac/celery.py diff --git a/substrabac/substrabac/celery.py b/substrabac/substrabac/celery.py index dbd7e85b9..faf074883 100644 --- a/substrabac/substrabac/celery.py +++ b/substrabac/substrabac/celery.py @@ -6,7 +6,7 @@ # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'substrabac.settings.prod') -app = Celery('substrabac', broker='amqp://localhost:5672//') +app = Celery('substrabac') # Using a string here means the worker doesn't have to serialize # the configuration object to child processes. 
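
A note on the broker configuration introduced in this patch, ahead of the two settings hunks below: the containers now receive the broker address through the CELERY_BROKER_URL environment variable instead of the hard-coded "-b rabbit" flag. A minimal sketch of the intended setting, assuming the standard string-valued Celery broker URL; the trailing comma in the common.py hunk that follows turns the value into a one-element tuple, which is almost certainly unintended:

    import os

    # Plain string; a trailing comma here would make the value a one-element
    # tuple instead of the URL string used everywhere else in the stack.
    CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'amqp://localhost:5672//')
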
diff --git a/substrabac/substrabac/settings/common.py b/substrabac/substrabac/settings/common.py index 1f9123c2e..6598755b5 100644 --- a/substrabac/substrabac/settings/common.py +++ b/substrabac/substrabac/settings/common.py @@ -158,3 +158,5 @@ CELERY_TASK_SERIALIZER = 'json' CELERY_TASK_TRACK_STARTED = True # since 4.0 CELERY_WORKER_CONCURRENCY = 1 + +CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'amqp://localhost:5672//'), diff --git a/substrabac/substrabac/settings/dev.py b/substrabac/substrabac/settings/dev.py index 83f45e043..7c3eea1b6 100644 --- a/substrabac/substrabac/settings/dev.py +++ b/substrabac/substrabac/settings/dev.py @@ -43,6 +43,9 @@ SITE_HOST = f'{ORG_NAME}.substrabac' SITE_PORT = DEFAULT_PORT +STATIC_URL = '/static/' +STATIC_ROOT = os.path.join(BASE_DIR, 'statics') + LOGGING = { 'version': 1, 'disable_existing_loggers': False, From 8d6740af8d66f1291c5c65c3023a959e90821e41 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 30 Apr 2019 10:11:12 +0200 Subject: [PATCH 054/106] Remove static in dev. --- docker/start.py | 4 +++- substrabac/substrabac/settings/dev.py | 3 --- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/docker/start.py b/docker/start.py index 61f7cf18a..f76ef3b5e 100644 --- a/docker/start.py +++ b/docker/start.py @@ -85,11 +85,13 @@ def generate_docker_compose_file(conf, launch_settings): if org_name_stripped == 'chunantes': port = 8001 + static = 'python3 manage.py collectstatic --noinput' if launch_settings == 'prod' else 'echo \'No static\'' + backend = {'container_name': f'{org_name_stripped}.substrabac', 'image': 'substra/substrabac', 'restart': 'unless-stopped', 'ports': [f'{port}:{port}'], - 'command': f'/bin/bash -c "while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; yes | python manage.py migrate --settings=substrabac.settings.{launch_settings}; python3 manage.py collectstatic --noinput; python3 manage.py runserver 0.0.0.0:{port}"', + 'command': f'/bin/bash -c "while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; yes | python manage.py migrate; {static}; python3 manage.py runserver 0.0.0.0:{port}"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, 'environment': ['DATABASE_HOST=postgresql', f'CELERY_BROKER_URL={CELERY_BROKER_URL}', diff --git a/substrabac/substrabac/settings/dev.py b/substrabac/substrabac/settings/dev.py index 7c3eea1b6..83f45e043 100644 --- a/substrabac/substrabac/settings/dev.py +++ b/substrabac/substrabac/settings/dev.py @@ -43,9 +43,6 @@ SITE_HOST = f'{ORG_NAME}.substrabac' SITE_PORT = DEFAULT_PORT -STATIC_URL = '/static/' -STATIC_ROOT = os.path.join(BASE_DIR, 'statics') - LOGGING = { 'version': 1, 'disable_existing_loggers': False, From 0f389f709dc16693b1b81e602b3ac050e6728c62 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 30 Apr 2019 11:06:12 +0200 Subject: [PATCH 055/106] Update GPUtil version to 1.4.0 instead of git install. 
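
GPUtil is a small GPU discovery/monitoring helper, so pinning the released 1.4.0 package avoids the git install. A quick sketch of the helpers that release exposes, assuming GPU selection in the worker goes through GPUtil (that code path is not part of this patch):

    import GPUtil

    gpus = GPUtil.getGPUs()                     # one entry per visible GPU
    free = GPUtil.getAvailable(order='memory')  # ids of free GPUs, most free memory first
    print(len(gpus), free)
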
--- substrabac/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrabac/requirements.txt b/substrabac/requirements.txt index 848a770e1..352fdbc5a 100644 --- a/substrabac/requirements.txt +++ b/substrabac/requirements.txt @@ -11,7 +11,7 @@ django-rest-swagger==2.1.2 djangorestframework==3.8.2 docker == 3.5.0 grpcio >= 1.0.1 -git+https://github.com/anderskm/gputil.git@7e32706a2674935ae45f2fcf389ae1ea463b0b6b +GPUtil == 1.4.0 hkdf >= 0.0.3 ipython==6.4.0 ipython-genutils==0.2.0 From b527dcfc7dd20bc8888599aac5f39dea2b03975e Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 30 Apr 2019 16:19:58 +0200 Subject: [PATCH 056/106] Add uwsgi for prod. --- docker/start.py | 8 ++++++-- substrabac/requirements.txt | 1 + 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/docker/start.py b/docker/start.py index f76ef3b5e..ec084a587 100644 --- a/docker/start.py +++ b/docker/start.py @@ -85,13 +85,17 @@ def generate_docker_compose_file(conf, launch_settings): if org_name_stripped == 'chunantes': port = 8001 - static = 'python3 manage.py collectstatic --noinput' if launch_settings == 'prod' else 'echo \'No static\'' + if launch_settings == 'prod': + django_server = f'python3 manage.py collectstatic --noinput; uwsgi --http :{port} --module substrabac.wsgi --static-map /static=/usr/src/app/substrabac/statics --master' + else: + + django_server = f'python3 manage.py runserver 0.0.0.0:{port}' backend = {'container_name': f'{org_name_stripped}.substrabac', 'image': 'substra/substrabac', 'restart': 'unless-stopped', 'ports': [f'{port}:{port}'], - 'command': f'/bin/bash -c "while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; yes | python manage.py migrate; {static}; python3 manage.py runserver 0.0.0.0:{port}"', + 'command': f'/bin/bash -c "while ! 
{{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; yes | python manage.py migrate; {django_server}"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, 'environment': ['DATABASE_HOST=postgresql', f'CELERY_BROKER_URL={CELERY_BROKER_URL}', diff --git a/substrabac/requirements.txt b/substrabac/requirements.txt index 352fdbc5a..53fd65156 100644 --- a/substrabac/requirements.txt +++ b/substrabac/requirements.txt @@ -25,3 +25,4 @@ requests == 2.20.0 rx >= 1.5.3 sentry-sdk == 0.5.2 six >= 1.4.0 +uwsgi == 2.0.18 From 1ddb55119f3e49f9004c633fb142de5608c3a621 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 2 May 2019 09:55:41 +0200 Subject: [PATCH 057/106] Static Storage Address URL --- docker/start.py | 5 ++++- substrabac/substrabac/settings/dev.py | 2 ++ substrabac/substrabac/settings/prod.py | 2 ++ substrabac/substrapp/tasks.py | 5 ++--- substrabac/substrapp/tests/tests_views.py | 2 ++ substrabac/substrapp/views/datasample.py | 9 ++------- substrabac/substrapp/views/objective.py | 8 ++------ 7 files changed, 16 insertions(+), 17 deletions(-) diff --git a/docker/start.py b/docker/start.py index f76ef3b5e..30630d90b 100644 --- a/docker/start.py +++ b/docker/start.py @@ -224,7 +224,10 @@ def generate_docker_compose_file(conf, launch_settings): dryrunner['environment'].append(dryrun_root) backend['environment'].append(dryrun_root) else: - scheduler['environment'].append(f"RAVEN_URL={raven_scheduler_url}",) + default_domain = os.environ.get('SUBSTRABAC_DEFAULT_DOMAIN', '') + if default_domain: + backend['environment'].append(f"DEFAULT_DOMAIN={default_domain}") + scheduler['environment'].append(f"RAVEN_URL={raven_scheduler_url}") worker['environment'].append(f"RAVEN_URL={raven_worker_url}") dryrunner['environment'].append(f"RAVEN_URL={raven_dryrunner_url}") diff --git a/substrabac/substrabac/settings/dev.py b/substrabac/substrabac/settings/dev.py index 83f45e043..f9c580123 100644 --- a/substrabac/substrabac/settings/dev.py +++ b/substrabac/substrabac/settings/dev.py @@ -43,6 +43,8 @@ SITE_HOST = f'{ORG_NAME}.substrabac' SITE_PORT = DEFAULT_PORT +DEFAULT_DOMAIN = os.environ.get('DEFAULT_DOMAIN', f'http://{SITE_HOST}:{SITE_PORT}') + LOGGING = { 'version': 1, 'disable_existing_loggers': False, diff --git a/substrabac/substrabac/settings/prod.py b/substrabac/substrabac/settings/prod.py index ad6acb1c4..40cb1fee6 100644 --- a/substrabac/substrabac/settings/prod.py +++ b/substrabac/substrabac/settings/prod.py @@ -43,6 +43,8 @@ SITE_HOST = os.environ.get('SITE_HOST', f'{ORG_NAME}.substrabac') SITE_PORT = os.environ.get('SITE_PORT', DEFAULT_PORT) +DEFAULT_DOMAIN = os.environ.get('DEFAULT_DOMAIN', f'http://{SITE_HOST}:{SITE_PORT}') + STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, 'statics') diff --git a/substrabac/substrapp/tasks.py b/substrabac/substrapp/tasks.py index 9bc381b74..7c405a176 100644 --- a/substrabac/substrapp/tasks.py +++ b/substrabac/substrapp/tasks.py @@ -458,9 +458,8 @@ def doTask(subtuple, tuple_type): with open(end_model_path, 'rb') as f: instance.file.save('model', f) - url_http = 'http' if settings.DEBUG else 'https' - current_site = f'{getattr(settings, "SITE_HOST")}:{getattr(settings, "SITE_PORT")}' - end_model_file = f'{url_http}://{current_site}{reverse("substrapp:model-file", args=[end_model_file_hash])}' + current_site = getattr(settings, "DEFAULT_DOMAIN") + end_model_file = f'{current_site}{reverse("substrapp:model-file", args=[end_model_file_hash])}' # compute metric task metrics_path = path.join(getattr(settings, 
'PROJECT_ROOT'), 'base_metrics') # base metrics comes with substrabac diff --git a/substrabac/substrapp/tests/tests_views.py b/substrabac/substrapp/tests/tests_views.py index 562f680bf..49ae428cb 100644 --- a/substrabac/substrapp/tests/tests_views.py +++ b/substrabac/substrapp/tests/tests_views.py @@ -72,6 +72,7 @@ def test_utils_getObjectFromLedger(self): @override_settings(DRYRUN_ROOT=MEDIA_ROOT) @override_settings(SITE_HOST='localhost') @override_settings(LEDGER={'name': 'test-org', 'peer': 'test-peer'}) +@override_settings(DEFAULT_DOMAIN='https://localhost') class ObjectiveViewTests(APITestCase): def setUp(self): @@ -1003,6 +1004,7 @@ def test_task_retrieve_pending(self): @override_settings(DRYRUN_ROOT=MEDIA_ROOT) @override_settings(SITE_HOST='localhost') @override_settings(LEDGER={'name': 'test-org', 'peer': 'test-peer'}) +@override_settings(DEFAULT_DOMAIN='https://localhost') class DataViewTests(APITestCase): def setUp(self): diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index 5488a4db7..790fdbf3d 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -91,7 +91,6 @@ def compute_dryrun(self, data, data_manager_keys): volumes.update({data_path: {'bind': '/sandbox/data', 'mode': 'rw'}, opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}}) - client.images.build(path=data_sample_docker_path, tag=data_docker, rm=False) @@ -138,12 +137,8 @@ class DataSampleViewSet(mixins.CreateModelMixin, def dryrun_task(self, data, data_manager_keys): task = compute_dryrun.apply_async((data, data_manager_keys), queue=f"{settings.LEDGER['name']}.dryrunner") - url_http = 'http' if settings.DEBUG else 'https' - site_port = getattr(settings, "SITE_PORT", None) - current_site = f'{getattr(settings, "SITE_HOST")}' - if site_port: - current_site = f'{current_site}:{site_port}' - task_route = f'{url_http}://{current_site}{reverse("substrapp:task-detail", args=[task.id])}' + current_site = getattr(settings, "DEFAULT_DOMAIN") + task_route = f'{current_site}{reverse("substrapp:task-detail", args=[task.id])}' return task, f'Your dry-run has been taken in account. You can follow the task execution on {task_route}' @staticmethod diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index 7ffbe6e7f..c7548a266 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -168,12 +168,8 @@ def create(self, request, *args, **kwargs): return Response({'message': f'Could not launch objective creation with dry-run on this instance: {str(e)}'}, status=status.HTTP_400_BAD_REQUEST) - url_http = 'http' if settings.DEBUG else 'https' - current_site = getattr(settings, "SITE_HOST") - site_port = getattr(settings, "SITE_PORT", None) - if site_port: - current_site = f'{current_site}:{site_port}' - task_route = f'{url_http}://{current_site}{reverse("substrapp:task-detail", args=[task.id])}' + current_site = getattr(settings, "DEFAULT_DOMAIN") + task_route = f'{current_site}{reverse("substrapp:task-detail", args=[task.id])}' msg = f'Your dry-run has been taken in account. 
You can follow the task execution on {task_route}' return Response({'id': task.id, 'message': msg}, status=status.HTTP_202_ACCEPTED) From cc54bbe7bab483dd4a90ad0f5cf0c7f8079e3510 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 23 Apr 2019 16:18:19 +0200 Subject: [PATCH 058/106] update algo interface --- substrabac/substrapp/tasks.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/substrabac/substrapp/tasks.py b/substrabac/substrapp/tasks.py index 9bc381b74..e2ba77571 100644 --- a/substrabac/substrapp/tasks.py +++ b/substrabac/substrapp/tasks.py @@ -409,22 +409,22 @@ def doTask(subtuple, tuple_type): # create the command option for algo if tuple_type == 'traintuple': - algo_command = '--train' # main command + algo_command = 'train' # main command # add list of inmodels if subtuple['inModels'] is not None: inmodels = [subtuple_model["traintupleKey"] for subtuple_model in subtuple['inModels']] - algo_command += f' --inmodels {" ".join(inmodels)}' + algo_command += " ".join(inmodels) # add fltask rank for training if flrank is not None: algo_command += f' --rank {flrank}' elif tuple_type == 'testtuple': - algo_command = '--predict' # main command + algo_command = 'predict' # main command inmodels = subtuple['model']["traintupleKey"] - algo_command += f' --inmodels {inmodels}' + algo_command += inmodels # local volume for fltask if fltask is not None and tuple_type == 'traintuple': From 67caf1121b6d389cbab714cc70749152a9b5b62a Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 23 Apr 2019 16:35:01 +0200 Subject: [PATCH 059/106] update metrics interface --- substrabac/base_metrics/Dockerfile | 14 +++++--------- substrabac/base_metrics/calc_metrics.py | 17 ----------------- substrabac/fake_metrics/Dockerfile | 14 +++++--------- substrabac/fake_metrics/calc_fake_metrics.py | 17 ----------------- 4 files changed, 10 insertions(+), 52 deletions(-) delete mode 100644 substrabac/base_metrics/calc_metrics.py delete mode 100644 substrabac/fake_metrics/calc_fake_metrics.py diff --git a/substrabac/base_metrics/Dockerfile b/substrabac/base_metrics/Dockerfile index ce69293d4..8b9f1df2d 100644 --- a/substrabac/base_metrics/Dockerfile +++ b/substrabac/base_metrics/Dockerfile @@ -1,13 +1,9 @@ -FROM nvidia/cuda:9.0-base - -RUN apt-get update; apt-get install -y build-essential libssl-dev python3 python3-dev python3-pip -RUN pip3 install --upgrade pip -RUN pip3 install pillow numpy sklearn pandas +FROM substra/substratools +RUN mkdir -p /sandbox +RUN mkdir -p /sandbox/opener RUN mkdir -p /sandbox/metrics - WORKDIR /sandbox -ADD ./calc_metrics.py . 
- -ENTRYPOINT ["python3", "calc_metrics.py"] +ENTRYPOINT ["python3"] +CMD ["-c", "import substratools as tools; tools.metrics.execute()"] diff --git a/substrabac/base_metrics/calc_metrics.py b/substrabac/base_metrics/calc_metrics.py deleted file mode 100644 index 89533d9c8..000000000 --- a/substrabac/base_metrics/calc_metrics.py +++ /dev/null @@ -1,17 +0,0 @@ -import json -import metrics -import opener - - -def calc_perf(folder_true="./data", folder_pred="./pred"): - """compute performances using the imported metrics.score function""" - # get true and pred values - y_true = opener.get_y(folder_true) - y_pred = opener.get_pred(folder_pred) - return {'all': metrics.score(y_true, y_pred)} - - -if __name__ == "__main__": - perf = calc_perf() - with open('./pred/perf.json', 'w') as outfile: - json.dump(perf, outfile) diff --git a/substrabac/fake_metrics/Dockerfile b/substrabac/fake_metrics/Dockerfile index 774ea7542..945986af4 100644 --- a/substrabac/fake_metrics/Dockerfile +++ b/substrabac/fake_metrics/Dockerfile @@ -1,13 +1,9 @@ -FROM nvidia/cuda:9.0-base - -RUN apt-get update; apt-get install -y build-essential libssl-dev python3 python3-dev python3-pip -RUN pip3 install --upgrade pip -RUN pip3 install pillow numpy sklearn pandas +FROM substra/substratools +RUN mkdir -p /sandbox +RUN mkdir -p /sandbox/opener RUN mkdir -p /sandbox/metrics - WORKDIR /sandbox -ADD ./calc_fake_metrics.py . - -ENTRYPOINT ["python3", "calc_fake_metrics.py"] +ENTRYPOINT ["python3"] +CMD ["-c", "import substratools as tools; tools.metrics.execute(dry_run=True)"] diff --git a/substrabac/fake_metrics/calc_fake_metrics.py b/substrabac/fake_metrics/calc_fake_metrics.py deleted file mode 100644 index ebba3361a..000000000 --- a/substrabac/fake_metrics/calc_fake_metrics.py +++ /dev/null @@ -1,17 +0,0 @@ -import json -import metrics -import opener - - -def calc_perf(folder_pred="./pred"): - """compute performances using the imported metrics.score function""" - # get true and pred values - y_true = opener.fake_y() - y_pred = opener.fake_y() - return {'all': metrics.score(y_true, y_pred)} - - -if __name__ == "__main__": - perf = calc_perf() - with open('./pred/perf.json', 'w') as outfile: - json.dump(perf, outfile) From 74ed212620149fdc682e8975a78c081e79a762ed Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 23 Apr 2019 18:19:43 +0200 Subject: [PATCH 060/106] add missing space in predict command --- substrabac/substrapp/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/substrabac/substrapp/tasks.py b/substrabac/substrapp/tasks.py index e2ba77571..5973576be 100644 --- a/substrabac/substrapp/tasks.py +++ b/substrabac/substrapp/tasks.py @@ -424,7 +424,7 @@ def doTask(subtuple, tuple_type): algo_command = 'predict' # main command inmodels = subtuple['model']["traintupleKey"] - algo_command += inmodels + algo_command += f' {inmodels}' # local volume for fltask if fltask is not None and tuple_type == 'traintuple': From 8afb218afc08440d4822ca358728d5d57c1909f2 Mon Sep 17 00:00:00 2001 From: Samuel Date: Mon, 29 Apr 2019 14:19:25 +0200 Subject: [PATCH 061/106] rename substra/substratools image to substratools --- substrabac/base_metrics/Dockerfile | 2 +- substrabac/fake_metrics/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/substrabac/base_metrics/Dockerfile b/substrabac/base_metrics/Dockerfile index 8b9f1df2d..84776bd46 100644 --- a/substrabac/base_metrics/Dockerfile +++ b/substrabac/base_metrics/Dockerfile @@ -1,4 +1,4 @@ -FROM substra/substratools +FROM substratools RUN 
mkdir -p /sandbox RUN mkdir -p /sandbox/opener diff --git a/substrabac/fake_metrics/Dockerfile b/substrabac/fake_metrics/Dockerfile index 945986af4..b36d0d458 100644 --- a/substrabac/fake_metrics/Dockerfile +++ b/substrabac/fake_metrics/Dockerfile @@ -1,4 +1,4 @@ -FROM substra/substratools +FROM substratools RUN mkdir -p /sandbox RUN mkdir -p /sandbox/opener From 6dda58de03ec5d903e0d6ddc81d453d44146d22b Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 16:49:34 +0200 Subject: [PATCH 062/106] access docker registry from tests --- .cicd/agent-python.yaml | 6 ++++++ Jenkinsfile | 1 + 2 files changed, 7 insertions(+) diff --git a/.cicd/agent-python.yaml b/.cicd/agent-python.yaml index a4b77ea5c..c0abd5a32 100644 --- a/.cicd/agent-python.yaml +++ b/.cicd/agent-python.yaml @@ -9,8 +9,14 @@ spec: volumeMounts: - { name: tmp, mountPath: /tmp } - { name: docker, mountPath: /var/run/docker.sock } + - { name: kaniko-secret, mountPath: /secret } + env: + - { name: GOOGLE_APPLICATION_CREDENTIALS, value: /secret/kaniko-secret.json } volumes: - name: tmp hostPath: { path: /tmp, type: Directory } - name: docker hostPath: { path: /var/run/docker.sock, type: File } + - name: kaniko-secret + secret: + secretName: kaniko-secret diff --git a/Jenkinsfile b/Jenkinsfile index 0653b390b..3a87c5817 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -31,6 +31,7 @@ pipeline { sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" dir("substrabac") { + sh "gcloud auth configure-docker" sh "pip install -r requirements.txt" sh "DJANGO_SETTINGS_MODULE=substrabac.settings.test coverage run manage.py test" sh "coverage report" From 522af8db2910bacc0b14ef2995ef88a0269ce264 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 16:54:24 +0200 Subject: [PATCH 063/106] ci: isntall gcloud --- Jenkinsfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 3a87c5817..827fd54af 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -29,9 +29,10 @@ pipeline { steps { sh "apt update" sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" + sh "apt install -y google-cloud-sdk" + sh "gcloud auth configure-docker" dir("substrabac") { - sh "gcloud auth configure-docker" sh "pip install -r requirements.txt" sh "DJANGO_SETTINGS_MODULE=substrabac.settings.test coverage run manage.py test" sh "coverage report" From a6f5df65b7348a6acbf6a6b2f056bb8a34a0de74 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 17:17:21 +0200 Subject: [PATCH 064/106] ci: fix docker login --- Jenkinsfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 827fd54af..5b3a80899 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -29,8 +29,7 @@ pipeline { steps { sh "apt update" sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" - sh "apt install -y google-cloud-sdk" - sh "gcloud auth configure-docker" + sh "docker login -u _json_key --password-stdin https://gcr.io < /secret/kaniko-secret.json" dir("substrabac") { sh "pip install -r requirements.txt" From 58da31d1b3cb74fde2e32383eb4d1d94aba126ea Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 17:20:09 +0200 Subject: [PATCH 065/106] ci: do docker loging first; fail fast --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 5b3a80899..235313aa5 100644 --- a/Jenkinsfile 
+++ b/Jenkinsfile @@ -28,8 +28,8 @@ pipeline { steps { sh "apt update" - sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" sh "docker login -u _json_key --password-stdin https://gcr.io < /secret/kaniko-secret.json" + sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" dir("substrabac") { sh "pip install -r requirements.txt" From 8d02389a1135a7edf8fc2f8db57884b9ba2d35d7 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 17:27:11 +0200 Subject: [PATCH 066/106] ci: docker login must be done after requirements --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 235313aa5..ce4edcd1a 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -28,11 +28,11 @@ pipeline { steps { sh "apt update" - sh "docker login -u _json_key --password-stdin https://gcr.io < /secret/kaniko-secret.json" sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" dir("substrabac") { sh "pip install -r requirements.txt" + sh "docker login -u _json_key --password-stdin https://gcr.io < /secret/kaniko-secret.json" sh "DJANGO_SETTINGS_MODULE=substrabac.settings.test coverage run manage.py test" sh "coverage report" sh "coverage html" From 4ec1381b43dbe9201479284c4325d1ede54c18d5 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 17:36:54 +0200 Subject: [PATCH 067/106] install docker cli manually --- Jenkinsfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index ce4edcd1a..c5829e33f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -29,10 +29,11 @@ pipeline { steps { sh "apt update" sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" + sh "apt install -y docker" + sh "docker login -u _json_key --password-stdin https://gcr.io < /secret/kaniko-secret.json" dir("substrabac") { sh "pip install -r requirements.txt" - sh "docker login -u _json_key --password-stdin https://gcr.io < /secret/kaniko-secret.json" sh "DJANGO_SETTINGS_MODULE=substrabac.settings.test coverage run manage.py test" sh "coverage report" sh "coverage html" From 664e50b113e8264c4c1e9894f06a752a327f4ee3 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 17:50:22 +0200 Subject: [PATCH 068/106] ci: manual install of docker --- Jenkinsfile | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index c5829e33f..b40e481ff 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -28,9 +28,11 @@ pipeline { steps { sh "apt update" - sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" - sh "apt install -y docker" + sh "export DOCKER_URL=https://download.docker.com/linux/static/stable/x86_64/docker-18.06.3-ce.tgz" + sh "echo $DOCKER_URL" + sh "apt install curl && mkdir -p /tmp/download && curl -L $DOCKER_URL | tar -xz -C /tmp/download && mv /tmp/download/docker/docker /usr/local/bin/" sh "docker login -u _json_key --password-stdin https://gcr.io < /secret/kaniko-secret.json" + sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" dir("substrabac") { sh "pip install -r requirements.txt" From 507ed9f830116ebdc0c8864948e9739d8151712c Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 17:54:00 +0200 Subject: [PATCH 069/106] ci: avoid export --- Jenkinsfile 
| 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index b40e481ff..03b0c0b57 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -28,9 +28,7 @@ pipeline { steps { sh "apt update" - sh "export DOCKER_URL=https://download.docker.com/linux/static/stable/x86_64/docker-18.06.3-ce.tgz" - sh "echo $DOCKER_URL" - sh "apt install curl && mkdir -p /tmp/download && curl -L $DOCKER_URL | tar -xz -C /tmp/download && mv /tmp/download/docker/docker /usr/local/bin/" + sh "apt install curl && mkdir -p /tmp/download && curl -L https://download.docker.com/linux/static/stable/x86_64/docker-18.06.3-ce.tgz | tar -xz -C /tmp/download && mv /tmp/download/docker/docker /usr/local/bin/" sh "docker login -u _json_key --password-stdin https://gcr.io < /secret/kaniko-secret.json" sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" From 081041717bb9cd58743fe46f0bddc2042f19a1d2 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 18:02:21 +0200 Subject: [PATCH 070/106] ci: modify docker login --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 03b0c0b57..b177c9826 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -29,7 +29,7 @@ pipeline { steps { sh "apt update" sh "apt install curl && mkdir -p /tmp/download && curl -L https://download.docker.com/linux/static/stable/x86_64/docker-18.06.3-ce.tgz | tar -xz -C /tmp/download && mv /tmp/download/docker/docker /usr/local/bin/" - sh "docker login -u _json_key --password-stdin https://gcr.io < /secret/kaniko-secret.json" + sh "docker login -u _json_key --password-stdin https://eu.gcr.io/substra-208412/ < /secret/kaniko-secret.json" sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" dir("substrabac") { From 35b09d3623d50b6d15c5ecc1ef24ac6f562ebe1a Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 18:02:58 +0200 Subject: [PATCH 071/106] ci: pull substratools image to fail fast --- Jenkinsfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Jenkinsfile b/Jenkinsfile index b177c9826..bf610a3bc 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -30,6 +30,7 @@ pipeline { sh "apt update" sh "apt install curl && mkdir -p /tmp/download && curl -L https://download.docker.com/linux/static/stable/x86_64/docker-18.06.3-ce.tgz | tar -xz -C /tmp/download && mv /tmp/download/docker/docker /usr/local/bin/" sh "docker login -u _json_key --password-stdin https://eu.gcr.io/substra-208412/ < /secret/kaniko-secret.json" + sh "docker pull substratools" sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" dir("substrabac") { From 37c1ad133d490cb40da910c591ad71abf8ced9be Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 18:31:14 +0200 Subject: [PATCH 072/106] ci: add gcr registry --- Jenkinsfile | 2 +- substrabac/base_metrics/Dockerfile | 2 +- substrabac/fake_metrics/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index bf610a3bc..534585d87 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -30,7 +30,7 @@ pipeline { sh "apt update" sh "apt install curl && mkdir -p /tmp/download && curl -L https://download.docker.com/linux/static/stable/x86_64/docker-18.06.3-ce.tgz | tar -xz -C /tmp/download && mv /tmp/download/docker/docker /usr/local/bin/" sh "docker login -u _json_key --password-stdin https://eu.gcr.io/substra-208412/ < /secret/kaniko-secret.json" 
- sh "docker pull substratools" + sh "docker pull eu.gcr.io/substra-208412/substratools" sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" dir("substrabac") { diff --git a/substrabac/base_metrics/Dockerfile b/substrabac/base_metrics/Dockerfile index 84776bd46..679578a93 100644 --- a/substrabac/base_metrics/Dockerfile +++ b/substrabac/base_metrics/Dockerfile @@ -1,4 +1,4 @@ -FROM substratools +FROM eu.gcr.io/substra-208412/substratools RUN mkdir -p /sandbox RUN mkdir -p /sandbox/opener diff --git a/substrabac/fake_metrics/Dockerfile b/substrabac/fake_metrics/Dockerfile index b36d0d458..61a90bf85 100644 --- a/substrabac/fake_metrics/Dockerfile +++ b/substrabac/fake_metrics/Dockerfile @@ -1,4 +1,4 @@ -FROM substratools +FROM eu.gcr.io/substra-208412/substratools RUN mkdir -p /sandbox RUN mkdir -p /sandbox/opener From a2510d76fb36e85804b6185064a1b9900b3a5be7 Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 30 Apr 2019 18:35:11 +0200 Subject: [PATCH 073/106] ci: remove testing bash line --- Jenkinsfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 534585d87..b177c9826 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -30,7 +30,6 @@ pipeline { sh "apt update" sh "apt install curl && mkdir -p /tmp/download && curl -L https://download.docker.com/linux/static/stable/x86_64/docker-18.06.3-ce.tgz | tar -xz -C /tmp/download && mv /tmp/download/docker/docker /usr/local/bin/" sh "docker login -u _json_key --password-stdin https://eu.gcr.io/substra-208412/ < /secret/kaniko-secret.json" - sh "docker pull eu.gcr.io/substra-208412/substratools" sh "apt install -y python3-pip python3-dev build-essential gfortran musl-dev postgresql-contrib git curl netcat" dir("substrabac") { From 71020d438096f101c5172781f6e6efdfd7f09274 Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 2 May 2019 12:11:28 +0200 Subject: [PATCH 074/106] populate assets should use substratools --- .../chunantes/algos/algo3/algo.tar.gz | Bin 1974 -> 1170 bytes .../datamanagers/datamanager0/opener.py | 128 ++++++++++-------- .../objectives/objective0/metrics.py | 8 +- .../owkin/datamanagers/datamanager0/opener.py | 126 +++++++++-------- .../owkin/objectives/objective0/metrics.py | 8 +- 5 files changed, 149 insertions(+), 121 deletions(-) diff --git a/substrabac/fixtures/chunantes/algos/algo3/algo.tar.gz b/substrabac/fixtures/chunantes/algos/algo3/algo.tar.gz index 604091d645513b639ef96d6cd2328252a006ef94..8805ccf0b6c5918f6c66d369556a1f0fa6facc30 100644 GIT binary patch literal 1170 zcmV;D1a12tiwFP>yvkev1MOE^Z`(E$?z4WyS^6Luux0s@c*x*~u5N>(STZCjmLUiP z+M*mT5-EsO5^val-ytd4a_uC;fNWU7??t?v8_$J2BH2=e#p=y#B8sA;@fh@Xa&&0E z(a_E{#>2xA3`XN4{2opYhY$@$!|3P@M6YU9jY=v~qA8N(8I{$ObV!z_&&aW7f%W%) z0^j#9s(H?&WP$^IBzcihcwp)lhR5g#-i*m0ryp&WU7;_kyQv8Kf+uoaA)s8w2}D zV@(ZlfFKFD+)iKM#xY6xf(B-@ zi=ha4Ldue?IzTf?Ez_1P?I@JjwOiVS+Mtjrn#-WGMT#nr^13&;f$0?d*ayNBy#(Lk zG!s(BX~h=`^Cdx}7<}11a-Z*RPshmioln zim7iySE6~sY%4k)4>yOwgBZUuX;SD^aC-cLQ#5_!lx^Bt;I8v^VD`{zNUF4#19PaZ zRfVxKk+STa&JLpv;NKT#rF`vsL-CdK+fFZtx^3BPZUIqH9_WoO_@&>`g_&;KQ480% zL8wIrX=kHDl6$&a057Y0*sIFu(6!YZwyg>apYdADRToyZE@@KXZW^`Jy~Udj$rs9| zW3$}UIpe7a{1d8_zHBs!zwzZ&Y!qQKUai+w>$O!&hfWBrzl8lxxAn9{*il$gc}of! 
zG;=e%j&8cp8|+i9OPlami5M^5pkafa-pDE8%}N)@Ba?Xc4)pb&RLSm^*RnZfsxCIc zpL5PfW}Rbb{g#NPQmyat2)aD%#9l(W-qzHMEJa!!vvAKNm1-+t zHcMvEj=!Eh8Gg&O8-2aL6?|NHBKBK5-am`P^L|AQJ6Dh8z^4=$MdtFfL7shfnA^I*FAV|7=1q;4Eybt-1E-AoB}CSZ%GwY1wIb+hxOdFKiHJ_ zRxqzvmh`BUlq*Iu?1bi0X1#>o0~9NDEBL7Sn#E>Zu)?Uo?`SQrz)oL0)V~+?o#c7H zoSlE3eL9D}#20oh9zE~fYzASU*uPl_J@3Qm<@v8apM5&LgzJM^#X%PiYLbH+cb_;o kI5;>sI5;>sI5;>sI5;>sI5;>sIDDJ<3qhb%#Q-P(07_X!Q~&?~ literal 1974 zcmV;n2TAxJiwFP>Q*B%T1MOMeZsWQUPK*Aay6ROg3KW=bE+nJ2;yBqHG=)%XPm(>0 zvzsgupLUCFATSbT^F$H_kxJGr+Gpt_^m+O;ogpP#vK?>QZniCI0)b3&IP-D%&2Xfg z$mo`riAed*leOV+cyxFO^!(!Jxp@vpwr|?O^AYSH9F9-TQ`NMK3mLwbW*sZ|)`9IRmE-&6dekWqV_M$3g$G`N4gBzx}<6M4o4y@3F z3D-~+G1L54^S;Q@A887M8QfGNjR#yQo@>EUNX3m(=^*BJ0L4sCWq!~+&0zDp5QQ;; z&x5(XLZqp@hrG&)8K~Qovod#_-(6gOee&uO_7uzG8~MR;UYwkOzsJ&v^ij}voYV91 z_BfR<#^cNSC$0D^{{47K z{Htu|NEnZ zqod{c$I)&4e~#w5?p1Y@38jS0fq!5b1|r-GJ#DUElyF2=$Wnt;&985j%$wh-oJ>SM zX}YYO6s*JoqVC(foGHi)Cn;st==D>P<1@@;%u}d^uU?*fiCI92MDWs?+bwub-Ca17 zCFJtH-xx)tST*EQ!wrX$lQfU9ngHCC(j|ig7bqT8rO-1JV2E2e^2JPy*>fBxBDC;^1dkm`oaipI18bRFs|gat z*`D2M%u8{{QGlGGr@X;xlz>raG{rnY51HL4E1{)K6`e;t4Phv9p~KKqJWcw5d6R{S zOwmH%`g_zx~1n?8<*S#vy1! z>58?`pjfNd6>GyoxpucJ*ujgEoqkuevkesO^qsD3lqM+Ap`&-t$}FUGp@VT*aU!8o zfou6uVreSPwC2I#s5e))3(=HEw;&P>FQGKCmg6I1#0v}+mMOr+b&U;mJR~5n%vOdV z5!K_3K3&cho}c4QC5uCf%CHVEuh~YgBis;JN8p8@o8Dg@^}M`b6M{?A@M$74PXKW)y}D|*d1TpV9nze7b&5k z=&Fll0<6hBjgxa6BAq}C@aH;CBvOfz%DpUaZ6Mb|^%y^O%(-oArRF=pZxR zaq4m-;HbZEMa^yzpD#4elr@91?+5$W5Cq_cu3;e#!OyWv+zyi}k2GEkq5J2#C381W zM)sug3#O<3w^Gz}YlmrnP0;oF;NH7Ex($*pA?OtR7_SO%*-L#$S~OVC^R+a9*C_h{ z4=|aU+}h=X$;d56w2v0OwI<&P%M(hI-TmysSLjV7ip()eaJorM9jSlPmmS;hhu7rJ za;-aCWHg>7w|PscU^6{d^{u5PQFe_iHN`b4^bF>{HH9oT9Jb4Qy_JPT?Z>OLS{Cdu z$7dCM2xV!>?Pg4u=z zyFd=b$d1Jyc^Bcrc>T*Sxtg+qdrjZ;kO+Mk?5_h&NrUB~mLXPQUb74aG?8(ZHzR#u zcr^vXUJv$YPU(1css%a0yUB(OM#=`So(b#Kx?)(lVtrJ!FjV^v88XS(2hZ5u@7SeF zU>>^`z+eC;7jS+thO>*ym#1SGfA#7LuEwWtAHamS13^k`UA7k4`n#rknbu&afDtA~ zk}Irx$srU@GzUvH%Pcgb!NLW#1{{|+tZ~M|PE8{Ni9|iCp}$g1wZg#2ZHxkw>RZHX za|bi#S=RW!frVp>V~9cBxc?=OKr2%r_WN3pchwq*A7Eld4?G(3q$wV#)0{K zK&4CbDZzhHPO6OO`mLF%d1=Lb7RMp0=RJ2o@3!t>fN?ayIKmDy`wm=%#X3ja6~{pg zHCClMIAd6E^dGFEr#vkJ_cg;ZcuJuOq;ivWZLJ&cd`jC^;U>$4wOpUm=e3<#+jV)W z=<5O&Yj>;++%YZIRgvVrHbEtQ1D#M z)UA2_67 Date: Thu, 2 May 2019 12:21:42 +0200 Subject: [PATCH 075/106] update assets --- .../fixtures/owkin/datamanagers/datamanager0/opener.py | 2 +- substrabac/substrapp/views/datamanager.py | 8 +------- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/substrabac/fixtures/owkin/datamanagers/datamanager0/opener.py b/substrabac/fixtures/owkin/datamanagers/datamanager0/opener.py index f7dbd14d2..8d54fcd65 100644 --- a/substrabac/fixtures/owkin/datamanagers/datamanager0/opener.py +++ b/substrabac/fixtures/owkin/datamanagers/datamanager0/opener.py @@ -1,4 +1,4 @@ -"""Opener of the simplified ISIC 2018 dataset""" +"""Opener of the simplified ISIC 2018 dataset (other opener)""" import os import csv import numpy as np diff --git a/substrabac/substrapp/views/datamanager.py b/substrabac/substrapp/views/datamanager.py index 96869ef72..283023ec6 100644 --- a/substrabac/substrapp/views/datamanager.py +++ b/substrabac/substrapp/views/datamanager.py @@ -35,13 +35,7 @@ def perform_create(self, serializer): def dryrun(self, data_opener): - mandatory_functions = {'get_X': {'folder'}, - 'get_y': {'folder'}, - 'save_pred': {'y_pred', 'folder'}, - 'get_pred': {'folder'}, - 'fake_X': {'n_sample'}, - 'fake_y': {'n_sample'} - } + mandatory_functions = {} # currently check by the substratools pacakge file = data_opener.open().read() 
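
The asset migration in this part of the series moves the openers and metrics onto the substratools package: the metric images now run tools.metrics.execute(), the data sample check loads the opener through tools.opener.load_from_module(), and the datamanager dryrun only verifies that the opener imports substratools. For orientation, here is a minimal, hypothetical sketch of what such an opener module could look like; the get_X/get_y names are taken from the old opener interface and from the calls in open_data_sample.py, while the file layout and return types below are assumptions, not the substratools specification.

    # Minimal, hypothetical opener module for the substratools-based flow.
    # Assumptions: substratools loads this module via tools.opener.load_from_module()
    # and then calls get_X() / get_y(); the CSV layout used here is illustrative only.
    import csv
    import os

    import substratools  # noqa: F401  (the datamanager dryrun only checks for this import)


    def get_X(folder='./data'):
        """Return the features found in the data sample folder."""
        with open(os.path.join(folder, 'x.csv')) as f:
            return [row for row in csv.reader(f)]


    def get_y(folder='./data'):
        """Return the labels found in the data sample folder."""
        with open(os.path.join(folder, 'y.csv')) as f:
            return [row[0] for row in csv.reader(f)]

With an opener of this shape, the same module can back both a real run and the fake_data_sample dryrun container, since the latter only needs load_from_module() to succeed and get_X/get_y to be callable.
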
From f9333b50f4d45022a14924493848f686ab6291ac Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 2 May 2019 13:53:49 +0200 Subject: [PATCH 076/106] fix assets with substratools --- substrabac/fake_data_sample/Dockerfile | 2 +- substrabac/fake_data_sample/open_data_sample.py | 12 ++++-------- substrabac/substrapp/tests/tests_views.py | 12 ------------ 3 files changed, 5 insertions(+), 21 deletions(-) diff --git a/substrabac/fake_data_sample/Dockerfile b/substrabac/fake_data_sample/Dockerfile index 4fe1639f2..a47637b76 100644 --- a/substrabac/fake_data_sample/Dockerfile +++ b/substrabac/fake_data_sample/Dockerfile @@ -1,4 +1,4 @@ -FROM nvidia/cuda:9.0-base +FROM eu.gcr.io/substra-208412/substratools RUN apt-get update; apt-get install -y build-essential libssl-dev python3 python3-dev python3-pip RUN pip3 install --upgrade pip diff --git a/substrabac/fake_data_sample/open_data_sample.py b/substrabac/fake_data_sample/open_data_sample.py index 3fce5230d..3b15172b9 100644 --- a/substrabac/fake_data_sample/open_data_sample.py +++ b/substrabac/fake_data_sample/open_data_sample.py @@ -1,11 +1,7 @@ -import opener - - -def open_data_samples(data_folder='./data'): - """Open data sample with the opener""" - opener.get_X(data_folder) - opener.get_y(data_folder) +import substratools as tools if __name__ == "__main__": - open_data_samples() + opener = tools.opener.load_from_module() + opener.get_X() + opener.get_y() diff --git a/substrabac/substrapp/tests/tests_views.py b/substrabac/substrapp/tests/tests_views.py index 562f680bf..924395064 100644 --- a/substrabac/substrapp/tests/tests_views.py +++ b/substrabac/substrapp/tests/tests_views.py @@ -787,18 +787,6 @@ def test_datamanager_create_dryrun(self): self.assertEqual(response.data, {'message': f'Your data opener is valid. 
You can remove the dryrun option.'}) self.assertEqual(response.status_code, status.HTTP_200_OK) - # Will fail because metrics.py instead of opener - files = {'data_opener': open(os.path.join(dir_path, - '../../fixtures/owkin/objectives/objective0/metrics.py'), - 'rb'), - 'description': open(os.path.join(dir_path, - '../../fixtures/chunantes/datamanagers/datamanager0/description.md'), - 'rb')} - - response = self.client.post(url, {**data, **files}, format='multipart', **self.extra) - self.assertIn('please review your opener and the documentation.', response.data['message']) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - for x in files: files[x].close() From 12abc75801201c5971a3ebbdb2dc159e95a9b8bb Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 2 May 2019 13:55:35 +0200 Subject: [PATCH 077/106] update algo4 with substratools --- .../chunantes/algos/algo4/algo.tar.gz | Bin 1204 -> 1205 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/substrabac/fixtures/chunantes/algos/algo4/algo.tar.gz b/substrabac/fixtures/chunantes/algos/algo4/algo.tar.gz index bf03eda7410917d4b5fd54df61708c72a727287d..ce1b4e652d7e63368816eb9949011f180c3c96e6 100644 GIT binary patch literal 1205 zcmV;m1WNlKiwFRe+R9u21MOH_PvbZgp6C3Eqwqi~mC!<4B&0m-PANMN10(3JW`$5> z5~uZ&#F6a)onODlcG9K;l$i&1w30tn#FumPIiDQ|k}r5ztv2pl(i|G$>+|WyoS@H)QQ0i6)Fq+1C9W8PRwX@*%fYwM;>7pj?tIA$eNaU4jRXMISCB5}rZ^bq;s zq3-&9s02+rqq>WuCYD&|Ko8msn7@9r5Di&ZR0O?n?WgOZ)82N{X>a-2GG#VXk%>v(fc()b@FR^0GEXqVC35^lM}%8UoT zt*@+;gi4v!dAD>%&`*1LPDd@5aOH#&)$9Mt;tr{HY ziGq@{9q9u>) zVEhiZ{7UjbZIN+T*Z>X=V0sA`mvfk1UY(!KVgARbYq*}DeR(COYx1B&%Ix{}Vr1IQ zTl=I5MxPlhQTqLaKbu~i5PkaTIk5Ha@WYHon-nw_aGc+fHW?c~4UJ4lS~Rd0`qn__ z$m(nZBe!!Gn6soqye@a}-1%|7ujOr{%TV2Ym7rGXIY0II7Cmv`6?SgTbWc)VRJHoP zG+QkFfhwT&w~DP}-Bs(KDV@jkTT#E(;~1g{{5UR<2XXA%5-K9^b-XL7x5vZH@Ya?a z)$A0i80Pq=ukl{LlQi?~CJVz3>+M#Gn%lv5f9R4p2*hyd7`H>@Z~Q_@%D-Wva( z!uS2_=B{K?GF}0FB4u4rcw}k;^1h%%R4_&vkCinS3gl9D&4SP8teSVPRa4eWAQGzD z%LOkx^{`+SK9iDXw18Il`h4=SAm}q^RCuj?b0?J`it|FQ}MGy_(3W*|oeNz+C!wL}3Tc_qD8IlLp1r5|~fvw4uSf>xxY z@tz6$0A=h1LTv4!L;66AxYL}R{r>0q!1KIR&k0PlK|l1tsRszmqnWjhLuXy-00}O^ z1Dl?V3id!zKqo{k=tcs-m^YR(nqyYY-1=$Lg=&W(Nm#{Hk_3_#`4BRqNRsmcJ;Z)^ zpkMtV)PiQcUj2%_CYD6!KzG^m-luDU5MX>~J2&=KFRSQpce>S~3A$s@IV_@su>u*C}W%U_ZYjT{6~wS{j*pP~BoRf1Zj=lse`a`>`+Fn}pJ+=vX+h;5IGhR)`I;WDZ zU0^i0_5VeRhMJE?;G1~#(TeE-~JZasL}dqho#l8;{2)AI3+=`u=x}*Fbmw`wuX^y8PVTA@_J@4#dcJ=p{%TE_Gxbf?yTJY)+?+Jb@;@|%2SvWX2I5;>sI5;>sI5;>sI5;>s SI5;>s^zkpJ0SK!AC;$M=>r`z3 From fe94c91c8be1193f7cfa00ac11f41acbba14f247 Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 2 May 2019 13:57:39 +0200 Subject: [PATCH 078/106] update assets hashes after populate --- substrabac/substrapp/tests/assets.py | 180 +++++++++++++-------------- 1 file changed, 90 insertions(+), 90 deletions(-) diff --git a/substrabac/substrapp/tests/assets.py b/substrabac/substrapp/tests/assets.py index 1e4a55eac..50dfd0e8e 100644 --- a/substrabac/substrapp/tests/assets.py +++ b/substrabac/substrapp/tests/assets.py @@ -8,10 +8,10 @@ }, "metrics": { "name": "macro-average recall", - "hash": "0bc732c26bafdc41321c2bffd35b6835aa35f7371a4eb02994642c2c3a688f60", + "hash": "c42dca31fbc2ebb5705643e3bb6ee666bbfd956de13dd03727f825ad8445b4d7", "storageAddress": "http://testserver/objective/1cdafbb018dd195690111d74916b76c96892d897ec3587c814f287946db446c3/metrics/" }, - "owner": "506fb2dd5891731166847208f7a7d1b17371c577af72f26286bb81c730c18a18", + "owner": "fba9c2538319fe2b45ac7047e21b4bc7196537367814d5da7f0aae020d3be5f7", "testDataset": None, "permissions": "all" }, @@ 
-24,12 +24,12 @@ }, "metrics": { "name": "macro-average recall", - "hash": "750f622262854341bd44f55c1018949e9c119606ef5068bd7d137040a482a756", + "hash": "c42dca31fbc2ebb5705643e3bb6ee666bbfd956de13dd03727f825ad8445b4d7", "storageAddress": "http://testserver/objective/3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71/metrics/" }, - "owner": "506fb2dd5891731166847208f7a7d1b17371c577af72f26286bb81c730c18a18", + "owner": "fba9c2538319fe2b45ac7047e21b4bc7196537367814d5da7f0aae020d3be5f7", "testDataset": { - "dataManagerKey": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", + "dataManagerKey": "82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2", "dataSampleKeys": [ "8bf3bf4f753a32f27d18c86405e7a406a83a55610d91abcca9acc525061b8ecf", "17d58b67ae2028018108c9bf555fa58b2ddcfe560e0117294196e79d26140b2a" @@ -43,32 +43,32 @@ { "objectiveKey": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "description": { - "hash": "258bef187a166b3fef5cb86e68c8f7e154c283a148cd5bc344fec7e698821ad3", - "storageAddress": "http://testserver/data_manager/9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528/description/" + "hash": "15863c2af1fcfee9ca6f61f04be8a0eaaf6a45e4d50c421788d450d198e580f1", + "storageAddress": "http://testserver/data_manager/615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7/description/" }, - "key": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", - "name": "Simplified ISIC 2018", + "key": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", + "name": "ISIC 2018", "opener": { - "hash": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", - "storageAddress": "http://testserver/data_manager/9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528/opener/" + "hash": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", + "storageAddress": "http://testserver/data_manager/615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7/opener/" }, - "owner": "506fb2dd5891731166847208f7a7d1b17371c577af72f26286bb81c730c18a18", + "owner": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426", "permissions": "all", "type": "Images" }, { "objectiveKey": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "description": { - "hash": "15863c2af1fcfee9ca6f61f04be8a0eaaf6a45e4d50c421788d450d198e580f1", - "storageAddress": "http://testserver/data_manager/59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd/description/" + "hash": "258bef187a166b3fef5cb86e68c8f7e154c283a148cd5bc344fec7e698821ad3", + "storageAddress": "http://testserver/data_manager/82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2/description/" }, - "key": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", - "name": "ISIC 2018", + "key": "82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2", + "name": "Simplified ISIC 2018", "opener": { - "hash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", - "storageAddress": "http://testserver/data_manager/59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd/opener/" + "hash": "82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2", + "storageAddress": "http://testserver/data_manager/82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2/opener/" }, - "owner": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", + "owner": "fba9c2538319fe2b45ac7047e21b4bc7196537367814d5da7f0aae020d3be5f7", 
"permissions": "all", "type": "Images" } @@ -86,35 +86,35 @@ "hash": "b9463411a01ea00869bdffce6e59a5c100a4e635c0a9386266cad3c77eb28e9e", "storageAddress": "http://testserver/algo/0acc5180e09b6a6ac250f4e3c172e2893f617aa1c22ef1f379019d20fe44142f/description/" }, - "owner": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", + "owner": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426", "permissions": "all" }, { - "key": "da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b", + "key": "4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7", "name": "Logistic regression", "content": { - "hash": "da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b", - "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/file/" + "hash": "4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7", + "storageAddress": "http://testserver/algo/4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7/file/" }, "description": { "hash": "124a0425b746d7072282d167b53cb6aab3a31bf1946dae89135c15b0126ebec3", - "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/description/" + "storageAddress": "http://testserver/algo/4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7/description/" }, - "owner": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", + "owner": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426", "permissions": "all" }, { - "key": "f2d9fd38e25cd975c49f3ce7e6739846585e89635a86689b5db42ab2c0c57284", + "key": "9c3d8777e11fd72cbc0fd672bec3a0848f8518b4d56706008cc05f8a1cee44f9", "name": "Random Forest", "content": { - "hash": "f2d9fd38e25cd975c49f3ce7e6739846585e89635a86689b5db42ab2c0c57284", - "storageAddress": "http://testserver/algo/f2d9fd38e25cd975c49f3ce7e6739846585e89635a86689b5db42ab2c0c57284/file/" + "hash": "9c3d8777e11fd72cbc0fd672bec3a0848f8518b4d56706008cc05f8a1cee44f9", + "storageAddress": "http://testserver/algo/9c3d8777e11fd72cbc0fd672bec3a0848f8518b4d56706008cc05f8a1cee44f9/file/" }, "description": { "hash": "4acea40c4b51996c88ef279c5c9aa41ab77b97d38c5ca167e978a98b2e402675", - "storageAddress": "http://testserver/algo/f2d9fd38e25cd975c49f3ce7e6739846585e89635a86689b5db42ab2c0c57284/description/" + "storageAddress": "http://testserver/algo/9c3d8777e11fd72cbc0fd672bec3a0848f8518b4d56706008cc05f8a1cee44f9/description/" }, - "owner": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", + "owner": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426", "permissions": "all" } ] @@ -126,24 +126,24 @@ "name": "Neural Network", "storageAddress": "http://testserver/algo/0acc5180e09b6a6ac250f4e3c172e2893f617aa1c22ef1f379019d20fe44142f/file/" }, - "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", + "creator": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426", "dataset": { "keys": [ - "bcdda7da240f1de016e5c185d63027ff6536c233f7ed96d086766e99027d4e24", + "31510dc1d8be788f7c5d28d05714f7efb9edb667762966b9adc02eadeaacebe9", "03a1f878768ea8624942d46a3b438c37992e626c2cf655023bcc3bed69d485d1" ], - "openerHash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", + "openerHash": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", "perf": 0, - "worker": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22" + "worker": 
"2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426" }, "fltask": "", "inModels": None, - "key": "1a585c39a427b14e96388f2fb2acd10bc0b26560022a40cb371cbcc55b3cafc7", - "log": "[00-01-0032-45bad7f]", + "key": "c4e3116dd3f895986b77e4d445178330630bd3f52407f10462dd4778e40090e0", + "log": "[00-01-0032-7cc5b61]", "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "metrics": { - "hash": "750f622262854341bd44f55c1018949e9c119606ef5068bd7d137040a482a756", + "hash": "c42dca31fbc2ebb5705643e3bb6ee666bbfd956de13dd03727f825ad8445b4d7", "storageAddress": "http://testserver/objective/3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71/metrics/" } }, @@ -155,34 +155,34 @@ }, { "algo": { - "hash": "da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b", + "hash": "4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7", "name": "Logistic regression", - "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/file/" + "storageAddress": "http://testserver/algo/4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7/file/" }, - "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", + "creator": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426", "dataset": { "keys": [ - "bcdda7da240f1de016e5c185d63027ff6536c233f7ed96d086766e99027d4e24", + "31510dc1d8be788f7c5d28d05714f7efb9edb667762966b9adc02eadeaacebe9", "03a1f878768ea8624942d46a3b438c37992e626c2cf655023bcc3bed69d485d1" ], - "openerHash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", + "openerHash": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", "perf": 1, - "worker": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22" + "worker": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426" }, "fltask": "", "inModels": None, - "key": "1ef64eb72db5d8d8aed6a35582e83487db5d085215678561283c54abace649a1", - "log": "Train - CPU:77.60 % - Mem:0.11 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", + "key": "3979576752e014adddadfc360d79c67cdccb0f4bae46936f35ce09c64e5832c8", + "log": "Train - CPU:173.81 % - Mem:0.11 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "metrics": { - "hash": "750f622262854341bd44f55c1018949e9c119606ef5068bd7d137040a482a756", + "hash": "c42dca31fbc2ebb5705643e3bb6ee666bbfd956de13dd03727f825ad8445b4d7", "storageAddress": "http://testserver/objective/3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71/metrics/" } }, "outModel": { - "hash": "e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b", - "storageAddress": "http://testserver/model/e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b/file/" + "hash": "592242f9b162178994897c5b8aa49450a17cc395bb9bc9864b830a6cdba6a075", + "storageAddress": "http://testserver/model/592242f9b162178994897c5b8aa49450a17cc395bb9bc9864b830a6cdba6a075/file/" }, "permissions": "all", "rank": 0, @@ -191,28 +191,28 @@ }, { "algo": { - "hash": "f2d9fd38e25cd975c49f3ce7e6739846585e89635a86689b5db42ab2c0c57284", + "hash": "9c3d8777e11fd72cbc0fd672bec3a0848f8518b4d56706008cc05f8a1cee44f9", "name": "Random Forest", - "storageAddress": "http://testserver/algo/f2d9fd38e25cd975c49f3ce7e6739846585e89635a86689b5db42ab2c0c57284/file/" + "storageAddress": "http://testserver/algo/9c3d8777e11fd72cbc0fd672bec3a0848f8518b4d56706008cc05f8a1cee44f9/file/" }, - "creator": 
"7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", + "creator": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426", "dataset": { "keys": [ - "bcdda7da240f1de016e5c185d63027ff6536c233f7ed96d086766e99027d4e24", + "31510dc1d8be788f7c5d28d05714f7efb9edb667762966b9adc02eadeaacebe9", "03a1f878768ea8624942d46a3b438c37992e626c2cf655023bcc3bed69d485d1" ], - "openerHash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", + "openerHash": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", "perf": 0, - "worker": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22" + "worker": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426" }, "fltask": "", "inModels": None, - "key": "9271dbc9d629c5d3bccd4c6f269f54e0d253fb5c53d3de958159605778b3de29", - "log": "[00-01-0032-899a79c]", + "key": "c6beed3a4ee5ead0c4246faac7931a944fc2286e193454bb1b851dee0c5a5f59", + "log": "[00-01-0032-139c39e]", "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "metrics": { - "hash": "750f622262854341bd44f55c1018949e9c119606ef5068bd7d137040a482a756", + "hash": "c42dca31fbc2ebb5705643e3bb6ee666bbfd956de13dd03727f825ad8445b4d7", "storageAddress": "http://testserver/objective/3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71/metrics/" } }, @@ -226,33 +226,33 @@ testtuple = [ { - "key": "cc0c0465c6aff2fd195bcc8e2ad45379f991a141224340f984d61556e6bfd09c", + "key": "b7b9291e5ff96ec7d16d38ab49915cbe15055347bb933a824887f2a76fb57c9a", "algo": { "name": "Logistic regression", - "hash": "da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b", - "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/file/" + "hash": "4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7", + "storageAddress": "http://testserver/algo/4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7/file/" }, "certified": True, - "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", + "creator": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426", "dataset": { - "worker": "506fb2dd5891731166847208f7a7d1b17371c577af72f26286bb81c730c18a18", + "worker": "fba9c2538319fe2b45ac7047e21b4bc7196537367814d5da7f0aae020d3be5f7", "keys": [ "17d58b67ae2028018108c9bf555fa58b2ddcfe560e0117294196e79d26140b2a", "8bf3bf4f753a32f27d18c86405e7a406a83a55610d91abcca9acc525061b8ecf" ], - "openerHash": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", + "openerHash": "82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2", "perf": 0 }, - "log": "Test - CPU:0.00 % - Mem:0.00 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", + "log": "Test - CPU:179.46 % - Mem:0.09 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", "model": { - "traintupleKey": "1ef64eb72db5d8d8aed6a35582e83487db5d085215678561283c54abace649a1", - "hash": "e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b", - "storageAddress": "http://testserver/model/e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b/file/" + "traintupleKey": "3979576752e014adddadfc360d79c67cdccb0f4bae46936f35ce09c64e5832c8", + "hash": "592242f9b162178994897c5b8aa49450a17cc395bb9bc9864b830a6cdba6a075", + "storageAddress": "http://testserver/model/592242f9b162178994897c5b8aa49450a17cc395bb9bc9864b830a6cdba6a075/file/" }, "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "metrics": { - "hash": 
"750f622262854341bd44f55c1018949e9c119606ef5068bd7d137040a482a756", + "hash": "c42dca31fbc2ebb5705643e3bb6ee666bbfd956de13dd03727f825ad8445b4d7", "storageAddress": "http://testserver/objective/3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71/metrics/" } }, @@ -266,32 +266,32 @@ { "testtuple": { "algo": { - "hash": "da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b", + "hash": "4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7", "name": "Logistic regression", - "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/file/" + "storageAddress": "http://testserver/algo/4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7/file/" }, "certified": True, - "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", + "creator": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426", "dataset": { "keys": [ "17d58b67ae2028018108c9bf555fa58b2ddcfe560e0117294196e79d26140b2a", "8bf3bf4f753a32f27d18c86405e7a406a83a55610d91abcca9acc525061b8ecf" ], - "openerHash": "9a832ed6cee6acf7e33c3acffbc89cebf10ef503b690711bdee048b873daf528", + "openerHash": "82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2", "perf": 0, - "worker": "506fb2dd5891731166847208f7a7d1b17371c577af72f26286bb81c730c18a18" + "worker": "fba9c2538319fe2b45ac7047e21b4bc7196537367814d5da7f0aae020d3be5f7" }, - "key": "cc0c0465c6aff2fd195bcc8e2ad45379f991a141224340f984d61556e6bfd09c", - "log": "Test - CPU:0.00 % - Mem:0.00 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", + "key": "b7b9291e5ff96ec7d16d38ab49915cbe15055347bb933a824887f2a76fb57c9a", + "log": "Test - CPU:179.46 % - Mem:0.09 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", "model": { - "hash": "e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b", - "storageAddress": "http://testserver/model/e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b/file/", - "traintupleKey": "1ef64eb72db5d8d8aed6a35582e83487db5d085215678561283c54abace649a1" + "hash": "592242f9b162178994897c5b8aa49450a17cc395bb9bc9864b830a6cdba6a075", + "storageAddress": "http://testserver/model/592242f9b162178994897c5b8aa49450a17cc395bb9bc9864b830a6cdba6a075/file/", + "traintupleKey": "3979576752e014adddadfc360d79c67cdccb0f4bae46936f35ce09c64e5832c8" }, "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "metrics": { - "hash": "750f622262854341bd44f55c1018949e9c119606ef5068bd7d137040a482a756", + "hash": "c42dca31fbc2ebb5705643e3bb6ee666bbfd956de13dd03727f825ad8445b4d7", "storageAddress": "http://testserver/objective/3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71/metrics/" } }, @@ -301,34 +301,34 @@ }, "traintuple": { "algo": { - "hash": "da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b", + "hash": "4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7", "name": "Logistic regression", - "storageAddress": "http://testserver/algo/da58a7a29b549f2fe5f009fb51cce6b28ca184ec641a0c1db075729bb266549b/file/" + "storageAddress": "http://testserver/algo/4cc53726e01f7e3864a6cf9da24d9cef04a7cbd7fd2892765ff76931dd4628e7/file/" }, - "creator": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22", + "creator": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426", "dataset": { "keys": [ - "bcdda7da240f1de016e5c185d63027ff6536c233f7ed96d086766e99027d4e24", + "31510dc1d8be788f7c5d28d05714f7efb9edb667762966b9adc02eadeaacebe9", 
"03a1f878768ea8624942d46a3b438c37992e626c2cf655023bcc3bed69d485d1" ], - "openerHash": "59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd", + "openerHash": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", "perf": 1, - "worker": "7e710b07840296d00cb5a12c60e5c2f4dfeaae71064dd257f9875ed4d7637d22" + "worker": "2cb13d299b337fae2969da1ff4ddd9a2f3004be52d64f671d13d9513f5a79426" }, "fltask": "", "inModels": None, - "key": "1ef64eb72db5d8d8aed6a35582e83487db5d085215678561283c54abace649a1", - "log": "Train - CPU:77.60 % - Mem:0.11 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", + "key": "3979576752e014adddadfc360d79c67cdccb0f4bae46936f35ce09c64e5832c8", + "log": "Train - CPU:173.81 % - Mem:0.11 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", "objective": { "hash": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "metrics": { - "hash": "750f622262854341bd44f55c1018949e9c119606ef5068bd7d137040a482a756", + "hash": "c42dca31fbc2ebb5705643e3bb6ee666bbfd956de13dd03727f825ad8445b4d7", "storageAddress": "http://testserver/objective/3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71/metrics/" } }, "outModel": { - "hash": "e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b", - "storageAddress": "http://testserver/model/e87a2d0a70a084acebf038b95790850cc72a96dff684f07ea1dc6a58dd03882b/file/" + "hash": "592242f9b162178994897c5b8aa49450a17cc395bb9bc9864b830a6cdba6a075", + "storageAddress": "http://testserver/model/592242f9b162178994897c5b8aa49450a17cc395bb9bc9864b830a6cdba6a075/file/" }, "permissions": "all", "rank": 0, From 6590fa46740e346778f100e4b077cf6dfd059f4e Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 2 May 2019 14:14:39 +0200 Subject: [PATCH 079/106] update asset hashes --- substrabac/substrapp/fixtures/model.py | 10 +++++----- substrabac/substrapp/tests/assets.py | 14 +++++++------- .../substrapp/tests/tests_createobjective.py | 2 +- substrabac/substrapp/tests/tests_views.py | 4 ++-- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/substrabac/substrapp/fixtures/model.py b/substrabac/substrapp/fixtures/model.py index 1d895892a..293df3c97 100644 --- a/substrabac/substrapp/fixtures/model.py +++ b/substrabac/substrapp/fixtures/model.py @@ -34,7 +34,7 @@ 'trainDataSample': { 'keys': ['62fb3263208d62c7235a046ee1d80e25512fe782254b730a9e566276b8c0ef3a', '42303efa663015e729159833a12ffb510ff92a6e386b8152f90f6fb14ddc94c9'], - 'openerHash': '59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd', + 'openerHash': '615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7', 'perf': 0.50, 'worker': 'a3119c79a173581425cbe6e06c3034ec396ee805b60d9a34feaa3048beb0e4a9', }, @@ -74,7 +74,7 @@ 'trainDataSample': { 'keys': ['62fb3263208d62c7235a046ee1d80e25512fe782254b730a9e566276b8c0ef3a', '42303efa663015e729159833a12ffb510ff92a6e386b8152f90f6fb14ddc94c9'], - 'openerHash': '59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd', + 'openerHash': '615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7', 'perf': 0.70, 'worker': 'a3119c79a173581425cbe6e06c3034ec396ee805b60d9a34feaa3048beb0e4a9', }, @@ -114,7 +114,7 @@ 'trainDataSample': { 'keys': ['62fb3263208d62c7235a046ee1d80e25512fe782254b730a9e566276b8c0ef3a', '42303efa663015e729159833a12ffb510ff92a6e386b8152f90f6fb14ddc94c9'], - 'openerHash': '59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd', + 'openerHash': '615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7', 'perf': 0.79, 'worker': 
'a3119c79a173581425cbe6e06c3034ec396ee805b60d9a34feaa3048beb0e4a9', }, @@ -154,7 +154,7 @@ 'trainDataSample': { 'keys': ['62fb3263208d62c7235a046ee1d80e25512fe782254b730a9e566276b8c0ef3a', '42303efa663015e729159833a12ffb510ff92a6e386b8152f90f6fb14ddc94c9'], - 'openerHash': '59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd', + 'openerHash': '615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7', 'perf': 0.79, 'worker': 'a3119c79a173581425cbe6e06c3034ec396ee805b60d9a34feaa3048beb0e4a9', }, @@ -194,7 +194,7 @@ 'trainDataSample': { 'keys': ['62fb3263208d62c7235a046ee1d80e25512fe782254b730a9e566276b8c0ef3a', '42303efa663015e729159833a12ffb510ff92a6e386b8152f90f6fb14ddc94c9'], - 'openerHash': '59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd', + 'openerHash': '615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7', 'perf': 0.79, 'worker': 'a3119c79a173581425cbe6e06c3034ec396ee805b60d9a34feaa3048beb0e4a9', }, diff --git a/substrabac/substrapp/tests/assets.py b/substrabac/substrapp/tests/assets.py index 50dfd0e8e..24c6fd30e 100644 --- a/substrabac/substrapp/tests/assets.py +++ b/substrabac/substrapp/tests/assets.py @@ -29,7 +29,7 @@ }, "owner": "fba9c2538319fe2b45ac7047e21b4bc7196537367814d5da7f0aae020d3be5f7", "testDataset": { - "dataManagerKey": "82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2", + "dataManagerKey": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", "dataSampleKeys": [ "8bf3bf4f753a32f27d18c86405e7a406a83a55610d91abcca9acc525061b8ecf", "17d58b67ae2028018108c9bf555fa58b2ddcfe560e0117294196e79d26140b2a" @@ -60,13 +60,13 @@ "objectiveKey": "3d70ab46d710dacb0f48cb42db4874fac14e048a0d415e266aad38c09591ee71", "description": { "hash": "258bef187a166b3fef5cb86e68c8f7e154c283a148cd5bc344fec7e698821ad3", - "storageAddress": "http://testserver/data_manager/82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2/description/" + "storageAddress": "http://testserver/data_manager/615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7/description/" }, - "key": "82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2", + "key": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", "name": "Simplified ISIC 2018", "opener": { - "hash": "82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2", - "storageAddress": "http://testserver/data_manager/82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2/opener/" + "hash": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", + "storageAddress": "http://testserver/data_manager/615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7/opener/" }, "owner": "fba9c2538319fe2b45ac7047e21b4bc7196537367814d5da7f0aae020d3be5f7", "permissions": "all", @@ -240,7 +240,7 @@ "17d58b67ae2028018108c9bf555fa58b2ddcfe560e0117294196e79d26140b2a", "8bf3bf4f753a32f27d18c86405e7a406a83a55610d91abcca9acc525061b8ecf" ], - "openerHash": "82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2", + "openerHash": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", "perf": 0 }, "log": "Test - CPU:179.46 % - Mem:0.09 GB - GPU:0.00 % - GPU Mem:0.00 GB; ", @@ -277,7 +277,7 @@ "17d58b67ae2028018108c9bf555fa58b2ddcfe560e0117294196e79d26140b2a", "8bf3bf4f753a32f27d18c86405e7a406a83a55610d91abcca9acc525061b8ecf" ], - "openerHash": "82e841c49822b2abcab9e95fe9ae359316d70ab5f627a28b0b67618dd945b2c2", + "openerHash": "615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7", "perf": 0, 
"worker": "fba9c2538319fe2b45ac7047e21b4bc7196537367814d5da7f0aae020d3be5f7" }, diff --git a/substrabac/substrapp/tests/tests_createobjective.py b/substrabac/substrapp/tests/tests_createobjective.py index 9df1beff3..1005011f5 100644 --- a/substrabac/substrapp/tests/tests_createobjective.py +++ b/substrabac/substrapp/tests/tests_createobjective.py @@ -69,7 +69,7 @@ def test_createobjective(self): } objective_pk = 'd5002e1cd50bd5de5341df8a7b7d11b6437154b3b08f531c9b8f93889855c66f' - datamanager_pk = '59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd' + datamanager_pk = '615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7' pkhash1 = '24fb12ff87485f6b0bc5349e5bf7f36ccca4eb1353395417fdae7d8d787f178c' pkhash2 = '30f6c797e277451b0a08da7119ed86fb2986fa7fab2258bf3edbd9f1752ed553' diff --git a/substrabac/substrapp/tests/tests_views.py b/substrabac/substrapp/tests/tests_views.py index 924395064..283590f04 100644 --- a/substrabac/substrapp/tests/tests_views.py +++ b/substrabac/substrapp/tests/tests_views.py @@ -719,7 +719,7 @@ def test_datamanager_list_filter_model(self): def test_datamanager_retrieve(self): url = reverse('substrapp:data_manager-list') - datamanager_response = [d for d in datamanager if d['key'] == '59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd'][0] + datamanager_response = [d for d in datamanager if d['key'] == '615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7'][0] with mock.patch.object(DataManagerViewSet, 'getObjectFromLedger') as mgetObjectFromLedger, \ mock.patch('substrapp.views.datamanager.requests.get') as mrequestsget: mgetObjectFromLedger.return_value = datamanager_response @@ -737,7 +737,7 @@ def test_datamanager_retrieve(self): FakeRequest(status=status.HTTP_200_OK, content=description_content)] - search_params = '59300f1fec4f5cdd3a236c7260ed72bdd24691efdec63b7910ea84136123cecd/' + search_params = '615ce631b93c185b492dfc97ed5dea27430d871fa4e50678bab3c79ce2ec6cb7/' response = self.client.get(url + search_params, **self.extra) r = response.json() From 0a7373544111cff429fd46ba97a7bc3b82de0af0 Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 2 May 2019 14:38:24 +0200 Subject: [PATCH 080/106] datamanager: check import substratools --- substrabac/substrapp/views/datamanager.py | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/substrabac/substrapp/views/datamanager.py b/substrabac/substrapp/views/datamanager.py index 283023ec6..2b88bded5 100644 --- a/substrabac/substrapp/views/datamanager.py +++ b/substrabac/substrapp/views/datamanager.py @@ -45,18 +45,10 @@ def dryrun(self, data_opener): return Response({'message': f'Opener must be a valid python file, please review your opener file and the documentation.'}, status=status.HTTP_400_BAD_REQUEST) - funcs_args = {n.name: {arg.arg for arg in n.args.args} for n in node.body if isinstance(n, ast.FunctionDef)} - - for mfunc, margs in mandatory_functions.items(): - try: - args = funcs_args[mfunc] - except: - return Response({'message': f'Opener must have a "{mfunc}" function, please review your opener and the documentation.'}, - status=status.HTTP_400_BAD_REQUEST) - else: - if not margs.issubset(args): - return Response({'message': f'Opener function "{mfunc}" must have at least {margs} arguments, please review your opener and the documentation.'}, - status=status.HTTP_400_BAD_REQUEST) + imported_module_names = [m.name for e in node.body if isinstance(e, ast.Import) for m in e.names] + if 'substratools' not in imported_module_names: + return 
Response({'message': f'Opener must import substratools, please review your opener and the documentation.'}, + status=status.HTTP_400_BAD_REQUEST) return Response({'message': f'Your data opener is valid. You can remove the dryrun option.'}, status=status.HTTP_200_OK) From 6c3e38b03b650972303976b05f9deae8f9705bca Mon Sep 17 00:00:00 2001 From: Samuel Date: Thu, 2 May 2019 14:45:33 +0200 Subject: [PATCH 081/106] cosmetic fixes --- substrabac/substrapp/views/datamanager.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/substrabac/substrapp/views/datamanager.py b/substrabac/substrapp/views/datamanager.py index 2b88bded5..b9fc0943e 100644 --- a/substrabac/substrapp/views/datamanager.py +++ b/substrabac/substrapp/views/datamanager.py @@ -35,8 +35,6 @@ def perform_create(self, serializer): def dryrun(self, data_opener): - mandatory_functions = {} # currently check by the substratools pacakge - file = data_opener.open().read() try: @@ -47,7 +45,7 @@ def dryrun(self, data_opener): imported_module_names = [m.name for e in node.body if isinstance(e, ast.Import) for m in e.names] if 'substratools' not in imported_module_names: - return Response({'message': f'Opener must import substratools, please review your opener and the documentation.'}, + return Response({'message': 'Opener must import substratools, please review your opener and the documentation.'}, status=status.HTTP_400_BAD_REQUEST) return Response({'message': f'Your data opener is valid. You can remove the dryrun option.'}, From 6cdf9803db323c41a3206f87126311b41240008c Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 2 May 2019 18:13:11 +0200 Subject: [PATCH 082/106] Update docker along new substra-network --- docker/start.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/docker/start.py b/docker/start.py index f76ef3b5e..34524991f 100644 --- a/docker/start.py +++ b/docker/start.py @@ -76,7 +76,7 @@ def generate_docker_compose_file(conf, launch_settings): for org in conf: org_name = org['name'] - orderer = org['orderer']['name'] + orderer_ca = org['orderer']['ca'] peer = org['peer']['name'] org_name_stripped = org_name.replace('-', '') @@ -111,11 +111,11 @@ def generate_docker_compose_file(conf, launch_settings): '/substra/dryrun:/substra/dryrun', '/substra/static:/usr/src/app/substrabac/statics', f'/substra/conf/{org_name}:/substra/conf/{org_name}', - f'/substra/data/orgs/{orderer}/ca-cert.pem:/substra/data/orgs/{orderer}/ca-cert.pem', + f'{orderer_ca}:{orderer_ca}', f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem', - f'/substra/data/orgs/{org_name}/user/msp:/substra/data/orgs/{org_name}/user/msp', + f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}', f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}', - f'/substra/data/orgs/{org_name}/user/msp:/opt/gopath/src/github.com/hyperledger/fabric/peer/msp'], + ], 'depends_on': ['postgresql', 'rabbit']} scheduler = {'container_name': f'{org_name_stripped}.scheduler', @@ -139,11 +139,11 @@ def generate_docker_compose_file(conf, launch_settings): f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}", f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': [f'/substra/conf/{org_name}:/substra/conf/{org_name}', - f'/substra/data/orgs/{orderer}/ca-cert.pem:/substra/data/orgs/{orderer}/ca-cert.pem', + f'{orderer_ca}:{orderer_ca}', f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem', - 
f'/substra/data/orgs/{org_name}/user/msp:/substra/data/orgs/{org_name}/user/msp', + f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}', f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}', - f'/substra/data/orgs/{org_name}/user/msp:/opt/gopath/src/github.com/hyperledger/fabric/peer/msp'], + ], 'depends_on': [f'substrabac{org_name_stripped}', 'postgresql', 'rabbit']} worker = {'container_name': f'{org_name_stripped}.worker', @@ -170,11 +170,11 @@ def generate_docker_compose_file(conf, launch_settings): '/substra/medias:/substra/medias', '/substra/servermedias:/substra/servermedias', f'/substra/conf/{org_name}:/substra/conf/{org_name}', - f'/substra/data/orgs/{orderer}/ca-cert.pem:/substra/data/orgs/{orderer}/ca-cert.pem', + f'{orderer_ca}:{orderer_ca}', f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem', - f'/substra/data/orgs/{org_name}/user/msp:/substra/data/orgs/{org_name}/user/msp', + f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}', f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}', - f'/substra/data/orgs/{org_name}/user/msp:/opt/gopath/src/github.com/hyperledger/fabric/peer/msp'], + ], 'depends_on': [f'substrabac{org_name_stripped}', 'rabbit']} dryrunner = {'container_name': f'{org_name_stripped}.dryrunner', @@ -202,11 +202,11 @@ def generate_docker_compose_file(conf, launch_settings): '/substra/servermedias:/substra/servermedias', '/substra/dryrun:/substra/dryrun', f'/substra/conf/{org_name}:/substra/conf/{org_name}', - f'/substra/data/orgs/{orderer}/ca-cert.pem:/substra/data/orgs/{orderer}/ca-cert.pem', + f'{orderer_ca}:{orderer_ca}', f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem', - f'/substra/data/orgs/{org_name}/user/msp:/substra/data/orgs/{org_name}/user/msp', + f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}', f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}', - f'/substra/data/orgs/{org_name}/user/msp:/opt/gopath/src/github.com/hyperledger/fabric/peer/msp'], + ], 'depends_on': [f'substrabac{org_name_stripped}', 'rabbit']} # Check if we have nvidia docker From 7b64e5f9d07070df1addd7393c9a16394928d07b Mon Sep 17 00:00:00 2001 From: GuillaumeCisco Date: Fri, 3 May 2019 17:00:57 +0200 Subject: [PATCH 083/106] support name with : --- substrabac/substrapp/views/objective.py | 2 ++ substrabac/substrapp/views/utils.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index 7ffbe6e7f..e45ee69c6 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -6,6 +6,8 @@ import tempfile import uuid +from urllib.parse import unquote + import requests from django.conf import settings from django.db import IntegrityError diff --git a/substrabac/substrapp/views/utils.py b/substrabac/substrapp/views/utils.py index 3d069cfc5..52e691fe1 100644 --- a/substrabac/substrapp/views/utils.py +++ b/substrabac/substrapp/views/utils.py @@ -35,7 +35,7 @@ def get_filters(query_params): value = el[2] filter = { - subparent: [value] + subparent: [unquote(value)] } if not len(filters[idx]): # create and add it From 1b68c5634ada603de65d06f4ac3ce0e7c7cd79b3 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Fri, 3 May 2019 17:33:45 +0200 Subject: [PATCH 084/106] Fix issue with dryrun exception --- substrabac/substrapp/views/datasample.py | 3 
--- 1 file changed, 3 deletions(-) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index 5488a4db7..a85579747 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -91,7 +91,6 @@ def compute_dryrun(self, data, data_manager_keys): volumes.update({data_path: {'bind': '/sandbox/data', 'mode': 'rw'}, opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}}) - client.images.build(path=data_sample_docker_path, tag=data_docker, rm=False) @@ -112,8 +111,6 @@ def compute_dryrun(self, data, data_manager_keys): except ContainerError as e: raise Exception(e.stderr) - except Exception as e: - raise str(e) finally: try: container = client.containers.get(data_docker_name) From 27abc3e0e3cd03833cf158e13bdf70c42d0b48c4 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 6 May 2019 14:58:36 +0200 Subject: [PATCH 085/106] Fix issue with no data provided. --- substrabac/substrapp/views/datasample.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index a85579747..5b3bb30d1 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -233,6 +233,9 @@ def compute_data(self, request): 'path': normpath(path) } + if not data: + raise Exception(f'No data sample provided.') + return list(data.values()) def handle_dryrun(self, data, data_manager_keys): From d46be75cdc0ae8c900a688b917527b4c917bf3d3 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 6 May 2019 14:38:01 +0200 Subject: [PATCH 086/106] Handle 404 when element does not exist. --- substrabac/substrapp/utils.py | 4 +++- substrabac/substrapp/views/algo.py | 2 ++ substrabac/substrapp/views/datamanager.py | 5 +++++ substrabac/substrapp/views/model.py | 2 ++ substrabac/substrapp/views/objective.py | 3 ++- substrabac/substrapp/views/testtuple.py | 3 +++ substrabac/substrapp/views/traintuple.py | 7 +++++++ substrabac/substrapp/views/utils.py | 7 ++++++- 8 files changed, 30 insertions(+), 3 deletions(-) diff --git a/substrabac/substrapp/utils.py b/substrabac/substrapp/utils.py index a2bdaea8d..9e15cfe4f 100644 --- a/substrabac/substrapp/utils.py +++ b/substrabac/substrapp/utils.py @@ -69,7 +69,7 @@ def queryLedger(options): print(msg, flush=True) else: try: - msg = output.stderr.decode('utf-8').split('Error')[2].split('\n')[0] + msg = output.stderr.decode('utf-8').split('Error')[-1].split('\n')[0] data = {'message': msg} except: msg = output.stderr.decode('utf-8') @@ -78,6 +78,8 @@ def queryLedger(options): st = status.HTTP_400_BAD_REQUEST if 'access denied' in msg: st = status.HTTP_403_FORBIDDEN + elif 'no element with key' in msg: + st = status.HTTP_404_NOT_FOUND clean_env_variables() diff --git a/substrabac/substrapp/views/algo.py b/substrabac/substrapp/views/algo.py index 1cdd0426c..bad64fffe 100644 --- a/substrabac/substrapp/views/algo.py +++ b/substrabac/substrapp/views/algo.py @@ -127,6 +127,8 @@ def retrieve(self, request, *args, **kwargs): data = getObjectFromLedger(pk, self.ledger_query_call) except JsonException as e: return Response(e.msg, status=status.HTTP_400_BAD_REQUEST) + except Http404: + return Response(f'No element with key {pk}', status=status.HTTP_404_NOT_FOUND) else: try: # try to get it from local db to check if description exists diff --git a/substrabac/substrapp/views/datamanager.py b/substrabac/substrapp/views/datamanager.py index b9fc0943e..783697216 100644 --- 
a/substrabac/substrapp/views/datamanager.py +++ b/substrabac/substrapp/views/datamanager.py @@ -177,6 +177,9 @@ def getObjectFromLedger(self, pk): 'args': f'{{"Args":["queryDataset", "{pk}"]}}' }) + if st == status.HTTP_404_NOT_FOUND: + raise Http404('Not found') + if st != status.HTTP_200_OK: raise JsonException(data) @@ -202,6 +205,8 @@ def retrieve(self, request, *args, **kwargs): data = self.getObjectFromLedger(pk) # datamanager use particular query to ledger except JsonException as e: return Response(e.msg, status=status.HTTP_400_BAD_REQUEST) + except Http404: + return Response(f'No element with key {pk}', status=status.HTTP_404_NOT_FOUND) else: error = None instance = None diff --git a/substrabac/substrapp/views/model.py b/substrabac/substrapp/views/model.py index fb367d2b0..c2434d565 100644 --- a/substrabac/substrapp/views/model.py +++ b/substrabac/substrapp/views/model.py @@ -78,6 +78,8 @@ def retrieve(self, request, *args, **kwargs): data = getObjectFromLedger(pk, 'queryModelDetails') except JsonException as e: return Response(e.msg, status=status.HTTP_400_BAD_REQUEST) + except Http404: + return Response(f'No element with key {pk}', status=status.HTTP_404_NOT_FOUND) else: error = None instance = None diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index e45ee69c6..5a9bd5ccc 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -265,7 +265,8 @@ def retrieve(self, request, *args, **kwargs): data = getObjectFromLedger(pk, self.ledger_query_call) except JsonException as e: return Response(e.msg, status=status.HTTP_400_BAD_REQUEST) - + except Http404: + return Response(f'No element with key {pk}', status=status.HTTP_404_NOT_FOUND) # try to get it from local db to check if description exists try: instance = self.get_object() diff --git a/substrabac/substrapp/views/testtuple.py b/substrabac/substrapp/views/testtuple.py index 4baf7f3de..fcb62b120 100644 --- a/substrabac/substrapp/views/testtuple.py +++ b/substrabac/substrapp/views/testtuple.py @@ -1,5 +1,6 @@ import json +from django.http import Http404 from rest_framework import mixins, status from rest_framework.response import Response from rest_framework.viewsets import GenericViewSet @@ -134,5 +135,7 @@ def retrieve(self, request, *args, **kwargs): data = getObjectFromLedger(pk, 'queryTesttuple') except JsonException as e: return Response(e.msg, status=status.HTTP_400_BAD_REQUEST) + except Http404: + return Response(f'No element with key {pk}', status=status.HTTP_404_NOT_FOUND) else: return Response(data, status=status.HTTP_200_OK) diff --git a/substrabac/substrapp/views/traintuple.py b/substrabac/substrapp/views/traintuple.py index ee164a6ea..6fa889077 100644 --- a/substrabac/substrapp/views/traintuple.py +++ b/substrabac/substrapp/views/traintuple.py @@ -1,4 +1,6 @@ import json + +from django.http import Http404 from rest_framework import mixins, status from rest_framework.response import Response from rest_framework.viewsets import GenericViewSet @@ -132,6 +134,9 @@ def getObjectFromLedger(self, pk): 'args': f'{{"Args":["queryTraintuple","{pk}"]}}' }) + if st == status.HTTP_404_NOT_FOUND: + raise Http404('Not found') + if st != status.HTTP_200_OK: raise JsonException(data) @@ -157,5 +162,7 @@ def retrieve(self, request, *args, **kwargs): data = self.getObjectFromLedger(pk) except JsonException as e: return Response(e.msg, status=status.HTTP_400_BAD_REQUEST) + except Http404: + return Response(f'No element with key {pk}', 
status=status.HTTP_404_NOT_FOUND) else: return Response(data, status=status.HTTP_200_OK) diff --git a/substrabac/substrapp/views/utils.py b/substrabac/substrapp/views/utils.py index 52e691fe1..182058283 100644 --- a/substrabac/substrapp/views/utils.py +++ b/substrabac/substrapp/views/utils.py @@ -2,7 +2,7 @@ import os from urllib.parse import unquote -from django.http import FileResponse +from django.http import FileResponse, Http404 from rest_framework import status from rest_framework.response import Response @@ -60,6 +60,9 @@ def getObjectFromLedger(pk, query): 'args': f'{{"Args":["{query}","{pk}"]}}' }) + if st == status.HTTP_404_NOT_FOUND: + raise Http404('Not found') + if st != status.HTTP_200_OK: raise JsonException(data) @@ -103,6 +106,8 @@ def manage_file(self, field): getObjectFromLedger(pk, self.ledger_query_call) except Exception as e: return Response(e, status=status.HTTP_400_BAD_REQUEST) + except Http404: + return Response(f'No element with key {pk}', status=status.HTTP_404_NOT_FOUND) else: object = self.get_object() From af1a48db3b1fb5224785511ea5077f2fe5ddd7f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=A9my=20Morel?= Date: Mon, 6 May 2019 15:40:11 +0200 Subject: [PATCH 087/106] JSON parsable responses for empty test/traintuple lists --- substrabac/substrapp/views/testtuple.py | 2 ++ substrabac/substrapp/views/traintuple.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/substrabac/substrapp/views/testtuple.py b/substrabac/substrapp/views/testtuple.py index 4baf7f3de..d7b864bde 100644 --- a/substrabac/substrapp/views/testtuple.py +++ b/substrabac/substrapp/views/testtuple.py @@ -115,6 +115,8 @@ def list(self, request, *args, **kwargs): 'args': '{"Args":["queryTesttuples"]}' }) + data = data if data else [] + return Response(data, status=st) def retrieve(self, request, *args, **kwargs): diff --git a/substrabac/substrapp/views/traintuple.py b/substrabac/substrapp/views/traintuple.py index ee164a6ea..4f93b6e5e 100644 --- a/substrabac/substrapp/views/traintuple.py +++ b/substrabac/substrapp/views/traintuple.py @@ -124,6 +124,8 @@ def list(self, request, *args, **kwargs): 'args': '{"Args":["queryTraintuples"]}' }) + data = data if data else [] + return Response(data, status=st) def getObjectFromLedger(self, pk): From e48d1acbd2af9453d06da9984c97efe17cb70b30 Mon Sep 17 00:00:00 2001 From: Samuel Date: Mon, 6 May 2019 17:23:15 +0200 Subject: [PATCH 088/106] retrieve model was buggy --- substrabac/substrapp/views/model.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/substrabac/substrapp/views/model.py b/substrabac/substrapp/views/model.py index fb367d2b0..dc7c7e528 100644 --- a/substrabac/substrapp/views/model.py +++ b/substrabac/substrapp/views/model.py @@ -86,14 +86,16 @@ def retrieve(self, request, *args, **kwargs): instance = self.get_object() except Http404: try: - instance = self.create_or_update_model(data, pk) + instance = self.create_or_update_model(data['traintuple'], + data['traintuple']['outModel']['hash']) except Exception as e: error = e else: # check if instance has file if not instance.file: try: - instance = self.create_or_update_model(data, pk) + instance = self.create_or_update_model(data['traintuple'], + data['traintuple']['outModel']['hash']) except Exception as e: error = e finally: From f69bbd06a5cb5a64dd3b5bb17053576bc54bcbd0 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 6 May 2019 18:56:14 +0200 Subject: [PATCH 089/106] Fix post delete signals --- substrabac/substrapp/signals/algo/post_delete.py | 5 +++-- 
substrabac/substrapp/signals/datamanager/post_delete.py | 5 +++-- substrabac/substrapp/signals/model/post_delete.py | 5 +++-- substrabac/substrapp/signals/objective/post_delete.py | 5 +++-- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/substrabac/substrapp/signals/algo/post_delete.py b/substrabac/substrapp/signals/algo/post_delete.py index d8518f8cb..274af3e72 100644 --- a/substrabac/substrapp/signals/algo/post_delete.py +++ b/substrabac/substrapp/signals/algo/post_delete.py @@ -1,4 +1,5 @@ -from os import path, rmdir +import shutil +from os import path from django.conf import settings @@ -7,4 +8,4 @@ def algo_post_delete(sender, instance, **kwargs): instance.description.delete(False) directory = path.join(getattr(settings, 'MEDIA_ROOT'), 'algos/{0}'.format(instance.pk)) - rmdir(directory) + shutil.rmtree(directory) diff --git a/substrabac/substrapp/signals/datamanager/post_delete.py b/substrabac/substrapp/signals/datamanager/post_delete.py index fcde18748..e6642f3f1 100644 --- a/substrabac/substrapp/signals/datamanager/post_delete.py +++ b/substrabac/substrapp/signals/datamanager/post_delete.py @@ -1,4 +1,5 @@ -from os import path, rmdir +import shutil +from os import path from django.conf import settings @@ -7,4 +8,4 @@ def datamanager_post_delete(sender, instance, **kwargs): instance.description.delete(False) directory = path.join(getattr(settings, 'MEDIA_ROOT'), 'datamanagers/{0}'.format(instance.pk)) - rmdir(directory) + shutil.rmtree(directory) diff --git a/substrabac/substrapp/signals/model/post_delete.py b/substrabac/substrapp/signals/model/post_delete.py index 2d2dcc6bf..d26eeb1c1 100644 --- a/substrabac/substrapp/signals/model/post_delete.py +++ b/substrabac/substrapp/signals/model/post_delete.py @@ -1,4 +1,5 @@ -from os import path, rmdir +import shutil +from os import path from django.conf import settings @@ -6,4 +7,4 @@ def model_post_delete(sender, instance, **kwargs): instance.file.delete(False) directory = path.join(getattr(settings, 'MEDIA_ROOT'), 'models/{0}'.format(instance.pk)) - rmdir(directory) + shutil.rmtree(directory) diff --git a/substrabac/substrapp/signals/objective/post_delete.py b/substrabac/substrapp/signals/objective/post_delete.py index e5f64e1f2..5b1f71ced 100644 --- a/substrabac/substrapp/signals/objective/post_delete.py +++ b/substrabac/substrapp/signals/objective/post_delete.py @@ -1,4 +1,5 @@ -from os import path, rmdir +import shutil +from os import path from django.conf import settings @@ -7,4 +8,4 @@ def objective_post_delete(sender, instance, **kwargs): instance.metrics.delete(False) directory = path.join(getattr(settings, 'MEDIA_ROOT'), 'objectives/{0}'.format(instance.pk)) - rmdir(directory) + shutil.rmtree(directory) From f61afb09220e754011028372d94833b933518f4b Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 7 May 2019 10:03:23 +0200 Subject: [PATCH 090/106] Fix conflict between dry-run folders. 
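For reference, a minimal sketch of the collision-avoidance idea this patch applies, assuming only that each dry-run already carries a content hash (`pkhash`); the helper name below is illustrative, not the actual function in the diff:

```python
import uuid

def dryrun_key(pkhash: str) -> str:
    # Suffix the content hash with a random hex string so that two
    # concurrent dry-runs on the same data sample never share a
    # subtuple working directory.
    return f"{pkhash}_{uuid.uuid4().hex}"

# e.g. build_subtuple_folders({'key': dryrun_key(pkhash)}) instead of
# reusing the bare pkhash as the folder key.
```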
--- substrabac/substrapp/views/datasample.py | 5 +++-- substrabac/substrapp/views/objective.py | 7 +++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index a85579747..8c6fdc639 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -58,7 +58,8 @@ def compute_dryrun(self, data, data_manager_keys): # Name of the dry-run subtuple (not important) pkhash = data[0]['pkhash'] - subtuple_directory = build_subtuple_folders({'key': pkhash}) + dryrun_uuid = f'{pkhash}_{uuid.uuid4().hex}' + subtuple_directory = build_subtuple_folders({'key': dryrun_uuid}) data_path = os.path.join(subtuple_directory, 'data') volumes = {} @@ -86,7 +87,7 @@ def compute_dryrun(self, data, data_manager_keys): data_sample_docker_path = os.path.join(getattr(settings, 'PROJECT_ROOT'), 'fake_data_sample') # fake_data comes with substrabac data_docker = 'data_dry_run' # tag must be lowercase for docker - data_docker_name = f'{data_docker}_{pkhash}_{uuid.uuid4().hex}' + data_docker_name = f'{data_docker}_{dryrun_uuid}' volumes.update({data_path: {'bind': '/sandbox/data', 'mode': 'rw'}, opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}}) diff --git a/substrabac/substrapp/views/objective.py b/substrabac/substrapp/views/objective.py index e45ee69c6..a69d8435a 100644 --- a/substrabac/substrapp/views/objective.py +++ b/substrabac/substrapp/views/objective.py @@ -33,7 +33,10 @@ @app.task(bind=True, ignore_result=False) def compute_dryrun(self, metrics_path, test_data_manager_key, pkhash): - subtuple_directory = build_subtuple_folders({'key': pkhash}) + + dryrun_uuid = f'{pkhash}_{uuid.uuid4().hex}' + + subtuple_directory = build_subtuple_folders({'key': dryrun_uuid}) metrics_path_dst = os.path.join(subtuple_directory, 'metrics/metrics.py') if not os.path.exists(metrics_path_dst): @@ -56,7 +59,7 @@ def compute_dryrun(self, metrics_path, test_data_manager_key, pkhash): metrics_path = os.path.join(getattr(settings, 'PROJECT_ROOT'), 'fake_metrics') # base metrics comes with substrabac metrics_docker = 'metrics_dry_run' # tag must be lowercase for docker - metrics_docker_name = f'{metrics_docker}_{pkhash}_{uuid.uuid4().hex}' + metrics_docker_name = f'{metrics_docker}_{dryrun_uuid}' volumes = {pred_path: {'bind': '/sandbox/pred', 'mode': 'rw'}, metrics_file: {'bind': '/sandbox/metrics/__init__.py', 'mode': 'ro'}, opener_file: {'bind': '/sandbox/opener/__init__.py', 'mode': 'ro'}} From 124c2a76a5c8cb4048f2510e3613519a3288128d Mon Sep 17 00:00:00 2001 From: Samuel Date: Tue, 7 May 2019 10:30:11 +0200 Subject: [PATCH 091/106] fix associated tests --- substrabac/substrapp/tests/tests_views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/substrabac/substrapp/tests/tests_views.py b/substrabac/substrapp/tests/tests_views.py index 283590f04..c9f45207a 100644 --- a/substrabac/substrapp/tests/tests_views.py +++ b/substrabac/substrapp/tests/tests_views.py @@ -591,7 +591,7 @@ def test_model_retrieve(self): with mock.patch('substrapp.views.model.getObjectFromLedger') as mgetObjectFromLedger, \ mock.patch('substrapp.views.model.requests.get') as mrequestsget, \ mock.patch('substrapp.views.model.ModelViewSet.compute_hash') as mcomputed_hash: - mgetObjectFromLedger.return_value = model[0]['traintuple'] + mgetObjectFromLedger.return_value = model[0] mrequestsget.return_value = FakeRequest(status=status.HTTP_200_OK, content=self.model.read().encode()) @@ -602,7 +602,7 
@@ def test_model_retrieve(self): search_params = model[0]['traintuple']['outModel']['hash'] + '/' response = self.client.get(url + search_params, **self.extra) r = response.json() - self.assertEqual(r, model[0]['traintuple']) + self.assertEqual(r, model[0]) def test_model_retrieve_fail(self): From 3227583cda2b6bbddc2d4d8018e6bb0348cb5f27 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 7 May 2019 14:53:32 +0200 Subject: [PATCH 092/106] Fix data sample dry run --- substrabac/substrapp/views/datasample.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index a85579747..1ea07cbfd 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -236,6 +236,8 @@ def compute_data(self, request): return list(data.values()) def handle_dryrun(self, data, data_manager_keys): + data_dry_run = [] + # write uploaded file to disk for d in data: pkhash = d['pkhash'] @@ -245,8 +247,16 @@ def handle_dryrun(self, data, data_manager_keys): with open(file_path, 'wb') as f: f.write(d['file'].open().read()) + data_dry_run.append({ + 'pkhash': pkhash, + 'file': file_path + }) + + if 'path' in d: + data_dry_run.append(d) + try: - task, msg = self.dryrun_task(data, data_manager_keys) + task, msg = self.dryrun_task(data_dry_run, data_manager_keys) except Exception as e: return Exception(f'Could not launch data creation with dry-run on this instance: {str(e)}') else: From 95da38a80232d8e4a28f6b1d73fa91c46faca0aa Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 9 May 2019 15:29:05 +0200 Subject: [PATCH 093/106] Fix domain name for celery dockers. --- docker/start.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker/start.py b/docker/start.py index 61d86d4ad..d5fd846f6 100644 --- a/docker/start.py +++ b/docker/start.py @@ -231,6 +231,9 @@ def generate_docker_compose_file(conf, launch_settings): default_domain = os.environ.get('SUBSTRABAC_DEFAULT_DOMAIN', '') if default_domain: backend['environment'].append(f"DEFAULT_DOMAIN={default_domain}") + worker['environment'].append(f"DEFAULT_DOMAIN={default_domain}") + scheduler['environment'].append(f"DEFAULT_DOMAIN={default_domain}") + dryrunner['environment'].append(f"DEFAULT_DOMAIN={default_domain}") scheduler['environment'].append(f"RAVEN_URL={raven_scheduler_url}") worker['environment'].append(f"RAVEN_URL={raven_worker_url}") dryrunner['environment'].append(f"RAVEN_URL={raven_dryrunner_url}") From 124abbd74262c5158e57004aa6811551b3ed06e7 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Thu, 9 May 2019 17:16:48 +0200 Subject: [PATCH 094/106] Fix issue with logging rotate on db --- docker/start.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/start.py b/docker/start.py index d5fd846f6..f229618be 100644 --- a/docker/start.py +++ b/docker/start.py @@ -42,6 +42,7 @@ def generate_docker_compose_file(conf, launch_settings): 'substrabac_tools': {'postgresql': {'container_name': 'postgresql', 'image': 'library/postgres:10.5', 'restart': 'unless-stopped', + 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, 'environment': [f'POSTGRES_USER={POSTGRES_USER}', f'USER={USER}', f'POSTGRES_PASSWORD={POSTGRES_PASSWORD}', @@ -65,6 +66,7 @@ def generate_docker_compose_file(conf, launch_settings): 'hostname': 'rabbitmq', # Must be set to be able to recover from volume 'restart': 'unless-stopped', 'image': 'rabbitmq:3', + 'logging': {'driver': 'json-file', 'options': 
{'max-size': '20m', 'max-file': '5'}}, 'environment': [f'RABBITMQ_DEFAULT_USER={RABBITMQ_DEFAULT_USER}', f'RABBITMQ_DEFAULT_PASS={RABBITMQ_DEFAULT_PASS}', f'HOSTNAME={RABBITMQ_HOSTNAME}', From e37e58939905834da0076210710c4ea36a73078b Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 13 May 2019 16:01:01 +0200 Subject: [PATCH 095/106] Enable multiple worker in production --- docker/start.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docker/start.py b/docker/start.py index d5fd846f6..a66b5a97e 100644 --- a/docker/start.py +++ b/docker/start.py @@ -85,8 +85,11 @@ def generate_docker_compose_file(conf, launch_settings): if org_name_stripped == 'chunantes': port = 8001 + cpu_count = os.cpu_count() + processes = 2 * int(cpu_count) + 1 + if launch_settings == 'prod': - django_server = f'python3 manage.py collectstatic --noinput; uwsgi --http :{port} --module substrabac.wsgi --static-map /static=/usr/src/app/substrabac/statics --master' + django_server = f'python3 manage.py collectstatic --noinput; uwsgi --http :{port} --module substrabac.wsgi --static-map /static=/usr/src/app/substrabac/statics --master --processes {processes} --threads 2' else: django_server = f'python3 manage.py runserver 0.0.0.0:{port}' From 613f8da0a7deb03e63b44b69d90b4e9a8737e6e2 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 13 May 2019 15:59:34 +0200 Subject: [PATCH 096/106] Check compute data. --- .../commands/bulkcreatedatasample.py | 2 +- substrabac/substrapp/tests/tests_query.py | 6 ++--- substrabac/substrapp/tests/tests_views.py | 2 +- substrabac/substrapp/views/datasample.py | 25 ++++++------------- 4 files changed, 13 insertions(+), 22 deletions(-) diff --git a/substrabac/substrapp/management/commands/bulkcreatedatasample.py b/substrabac/substrapp/management/commands/bulkcreatedatasample.py index 4a38b1144..d1dd6d2fb 100644 --- a/substrabac/substrapp/management/commands/bulkcreatedatasample.py +++ b/substrabac/substrapp/management/commands/bulkcreatedatasample.py @@ -97,7 +97,7 @@ def bulk_create_data_sample(data): # create on ledger + db ledger_data = {'test_only': test_only, 'data_manager_keys': data_manager_keys} - return DataSampleViewSet.commit(serializer, ledger_data, True) + return DataSampleViewSet.commit(serializer, ledger_data) class Command(BaseCommand): diff --git a/substrabac/substrapp/tests/tests_query.py b/substrabac/substrapp/tests/tests_query.py index b02deaadb..0a604e695 100644 --- a/substrabac/substrapp/tests/tests_query.py +++ b/substrabac/substrapp/tests/tests_query.py @@ -429,7 +429,7 @@ def test_add_data_sample_sync_ok(self): response = self.client.post(url, data, format='multipart', **extra) r = response.json() self.data_file.file.seek(0) - self.assertEqual(r['pkhash'], get_dir_hash(self.data_file.file)) + self.assertEqual(r[0]['pkhash'], get_dir_hash(self.data_file.file)) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -560,7 +560,7 @@ def test_add_data_sample_ko_already_exists(self): response = self.client.post(url, data, format='multipart', **extra) r = response.json() self.assertEqual(r['message'], - [{'pkhash': ['data sample with this pkhash already exists.']}]) + [[{'pkhash': ['data sample with this pkhash already exists.']}]]) self.assertEqual(response.status_code, status.HTTP_409_CONFLICT) def test_add_data_sample_ko_not_a_zip(self): @@ -617,7 +617,7 @@ def test_add_data_sample_ko_408(self): mis_zipfile.return_value = True response = self.client.post(url, data, format='multipart', **extra) r = response.json() - 
self.assertEqual(r['message'], {'pkhash': get_hash(file_mock), 'validated': False}) + self.assertEqual(r['message'], {'pkhash': [get_dir_hash(file_mock)], 'validated': False}) self.assertEqual(response.status_code, status.HTTP_408_REQUEST_TIMEOUT) def test_bulk_add_data_sample_ko_408(self): diff --git a/substrabac/substrapp/tests/tests_views.py b/substrabac/substrapp/tests/tests_views.py index 348af7909..76d418adb 100644 --- a/substrabac/substrapp/tests/tests_views.py +++ b/substrabac/substrapp/tests/tests_views.py @@ -1110,7 +1110,7 @@ def test_data_create(self): status.HTTP_201_CREATED) response = self.client.post(url, data=data, format='multipart', **self.extra) - self.assertEqual(response.data['pkhash'], pkhash) + self.assertEqual(response.data[0]['pkhash'], pkhash) self.assertEqual(response.status_code, status.HTTP_201_CREATED) data['file'].close() diff --git a/substrabac/substrapp/views/datasample.py b/substrabac/substrapp/views/datasample.py index f2c6b2603..4b8cab446 100644 --- a/substrabac/substrapp/views/datasample.py +++ b/substrabac/substrapp/views/datasample.py @@ -148,11 +148,9 @@ def check_datamanagers(data_manager_keys): raise Exception(f'One or more datamanager keys provided do not exist in local substrabac database. Please create them before. DataManager keys: {data_manager_keys}') @staticmethod - def commit(serializer, ledger_data, many): + def commit(serializer, ledger_data): instances = serializer.save() # can raise # init ledger serializer - if not many: - instances = [instances] ledger_data.update({'instances': instances}) ledger_serializer = LedgerDataSampleSerializer(data=ledger_data) @@ -166,8 +164,7 @@ def commit(serializer, ledger_data, many): data, st = ledger_serializer.create(ledger_serializer.validated_data) if st == status.HTTP_408_REQUEST_TIMEOUT: - if many: - data.update({'pkhash': [x['pkhash'] for x in serializer.data]}) + data.update({'pkhash': [x['pkhash'] for x in serializer.data]}) raise LedgerException(data, st) if st not in (status.HTTP_201_CREATED, status.HTTP_202_ACCEPTED): @@ -175,13 +172,9 @@ def commit(serializer, ledger_data, many): # update validated to True in response if 'pkhash' in data and data['validated']: - if many: - for d in serializer.data: - if d['pkhash'] in data['pkhash']: - d.update({'validated': data['validated']}) - else: - d = dict(serializer.data) - d.update({'validated': data['validated']}) + for d in serializer.data: + if d['pkhash'] in data['pkhash']: + d.update({'validated': data['validated']}) return serializer.data, st @@ -230,7 +223,7 @@ def compute_data(self, request): 'path': normpath(path) } - if not data: + if not data: # data empty raise Exception(f'No data sample provided.') return list(data.values()) @@ -271,10 +264,8 @@ def _create(self, request, data_manager_keys, test_only, dryrun): computed_data = self.compute_data(request) - many = len(computed_data) > 1 - data = computed_data if many else computed_data[0] + serializer = self.get_serializer(data=computed_data, many=True) - serializer = self.get_serializer(data=data, many=many) try: serializer.is_valid(raise_exception=True) except Exception as e: @@ -290,7 +281,7 @@ def _create(self, request, data_manager_keys, test_only, dryrun): # create on ledger + db ledger_data = {'test_only': test_only, 'data_manager_keys': data_manager_keys} - data, st = self.commit(serializer, ledger_data, many) + data, st = self.commit(serializer, ledger_data) return data, st def create(self, request, *args, **kwargs): From 8f5cc75e8934114a9c6b3221233d6e04d6fb6479 Mon Sep 17 
00:00:00 2001 From: GuillaumeCisco Date: Mon, 13 May 2019 18:40:59 +0200 Subject: [PATCH 097/106] Fix algo command --- substrabac/substrapp/tasks.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/substrabac/substrapp/tasks.py b/substrabac/substrapp/tasks.py index 1b9ea6d77..73aede428 100644 --- a/substrabac/substrapp/tasks.py +++ b/substrabac/substrapp/tasks.py @@ -414,17 +414,17 @@ def doTask(subtuple, tuple_type): # add list of inmodels if subtuple['inModels'] is not None: inmodels = [subtuple_model["traintupleKey"] for subtuple_model in subtuple['inModels']] - algo_command += " ".join(inmodels) + algo_command = f"{algo_command} {' '.join(inmodels)}" # add fltask rank for training if flrank is not None: - algo_command += f' --rank {flrank}' + algo_command = f"{algo_command} --rank {flrank}" elif tuple_type == 'testtuple': algo_command = 'predict' # main command inmodels = subtuple['model']["traintupleKey"] - algo_command += f' {inmodels}' + algo_command = f'{algo_command} {inmodels}' # local volume for fltask if fltask is not None and tuple_type == 'traintuple': From 981a156faf40d70d77a70b1c4959f6f6465f7068 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 14 May 2019 16:05:29 +0200 Subject: [PATCH 098/106] Add org name in local volume. --- substrabac/substrapp/tasks.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/substrabac/substrapp/tasks.py b/substrabac/substrapp/tasks.py index 73aede428..da89344b3 100644 --- a/substrabac/substrapp/tasks.py +++ b/substrabac/substrapp/tasks.py @@ -354,6 +354,7 @@ def prepareMaterials(subtuple, model_type): def doTask(subtuple, tuple_type): subtuple_directory = path.join(getattr(settings, 'MEDIA_ROOT'), 'subtuple', subtuple['key']) + org_name = getattr(settings, 'ORG_NAME') # Federated learning variables fltask = None @@ -428,7 +429,7 @@ def doTask(subtuple, tuple_type): # local volume for fltask if fltask is not None and tuple_type == 'traintuple': - flvolume = f'local-{fltask}' + flvolume = f'local-{fltask}-{org_name}' if flrank == 0: client.volumes.create(name=flvolume) else: @@ -499,7 +500,7 @@ def doTask(subtuple, tuple_type): # Rank == -1 -> Last fl subtuple or fl throws an exception if flrank == -1: - flvolume = f'local-{fltask}' + flvolume = f'local-{fltask}-{org_name}' local_volume = client.volumes.get(volume_id=flvolume) try: local_volume.remove(force=True) From abcfbbbbecbe2ecee9556cfb24da47ab5fac5a02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=A9my=20Morel?= Date: Mon, 20 May 2019 15:26:46 +0200 Subject: [PATCH 099/106] Fix typos --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 3da97e0d8..4be9757c2 100644 --- a/README.md +++ b/README.md @@ -80,7 +80,7 @@ It will drop the databases if they are already created, then create them and gra - With django migrations ```shell SUBSTRABAC_ORG=owkin SUBSTRABAC_DEFAULT_PORT=8000 python substrabac/manage.py migrate --settings=substrabac.settings.dev -SUBSTRABAC_ORG=chu-nantes SUBSTRABAC_DEFAULT_PORT=8001 python substrabac/manage.py migrate --settings=substrabac.settings.dev``` +SUBSTRABAC_ORG=chu-nantes SUBSTRABAC_DEFAULT_PORT=8001 python substrabac/manage.py migrate --settings=substrabac.settings.dev ``` ###### With fixtures (fixtures container has been run from substra-network, old behavior for testing) @@ -143,7 +143,7 @@ Execute this command in the `substrabac/substrabac` folder. Note the use of the development settings. 
```shell -DJANGO_SETTINGS_MODULE=substrabac.settings.dev SUBSTRABAC_ORG=owkin SUBSTRABAC_DEFAULT_PORT=8000 celery -E -A substrabac worker -l info -B -n owkin -Q owkin,scheduler, celery --hostname owkin.scheduler +DJANGO_SETTINGS_MODULE=substrabac.settings.dev SUBSTRABAC_ORG=owkin SUBSTRABAC_DEFAULT_PORT=8000 celery -E -A substrabac worker -l info -B -n owkin -Q owkin,scheduler,celery --hostname owkin.scheduler DJANGO_SETTINGS_MODULE=substrabac.settings.dev SUBSTRABAC_ORG=owkin SUBSTRABAC_DEFAULT_PORT=8000 celery -E -A substrabac worker -l info -B -n owkin -Q owkin,owkin.worker,celery --hostname owkin.worker DJANGO_SETTINGS_MODULE=substrabac.settings.dev SUBSTRABAC_ORG=owkin SUBSTRABAC_DEFAULT_PORT=8000 celery -E -A substrabac worker -l info -B -n owkin -Q owkin,owkin.dryrunner,celery --hostname owkin.dryrunner DJANGO_SETTINGS_MODULE=substrabac.settings.dev SUBSTRABAC_ORG=chu-nantes SUBSTRABAC_DEFAULT_PORT=8001 celery -E -A substrabac worker -l info -B -n chunantes -Q chu-nantes,scheduler,celery --hostname chu-nantes.scheduler From d12fd398b9d169bb111bd0ee61e1828dcb3723b6 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 6 May 2019 11:23:23 +0200 Subject: [PATCH 100/106] Three orgs configuration --- docker/postgresql/init.sh | 4 ++++ docker/start.py | 12 +++++++++--- substrabac/populate.py | 6 +++++- 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/docker/postgresql/init.sh b/docker/postgresql/init.sh index 9032cd387..92590b661 100644 --- a/docker/postgresql/init.sh +++ b/docker/postgresql/init.sh @@ -5,3 +5,7 @@ psql -U ${USER} -d substrabac_owkin -c "GRANT ALL PRIVILEGES ON DATABASE substra createdb -U ${USER} -E UTF8 substrabac_chunantes psql -U ${USER} -d substrabac_chunantes -c "GRANT ALL PRIVILEGES ON DATABASE substrabac_chunantes to substrabac;ALTER ROLE substrabac WITH SUPERUSER CREATEROLE CREATEDB;" + + +createdb -U ${USER} -E UTF8 substrabac_clb +psql -U ${USER} -d substrabac_clb -c "GRANT ALL PRIVILEGES ON DATABASE substrabac_clb to substrabac;ALTER ROLE substrabac WITH SUPERUSER CREATEROLE CREATEDB;" diff --git a/docker/start.py b/docker/start.py index 18ce433e7..dd938b023 100644 --- a/docker/start.py +++ b/docker/start.py @@ -13,6 +13,13 @@ FABRIC_LOGGING_SPEC = "debug" +BACKEND_PORT = { + 'owkin': 8000, + 'chunantes': 8001, + 'clb': 8002 +} + + def generate_docker_compose_file(conf, launch_settings): # POSTGRES @@ -83,9 +90,8 @@ def generate_docker_compose_file(conf, launch_settings): org_name_stripped = org_name.replace('-', '') # Dirty port assign - port = 8000 - if org_name_stripped == 'chunantes': - port = 8001 + + port = BACKEND_PORT[org_name_stripped] cpu_count = os.cpu_count() processes = 2 * int(cpu_count) + 1 diff --git a/substrabac/populate.py b/substrabac/populate.py index 7f1f18cb5..19edc7a8a 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -18,6 +18,7 @@ def setup_config(): print('Init config in /tmp/.substrabac for owkin and chunantes') client.create_config('owkin', 'http://owkin.substrabac:8000', '0.0') client.create_config('chunantes', 'http://chunantes.substrabac:8001', '0.0') + client.create_config('clb', 'http://clb.substrabac:8002', '0.0') def get_or_create(data, profile, asset, dryrun=False, register=False): @@ -78,10 +79,13 @@ def do_populate(): parser = argparse.ArgumentParser() parser.add_argument('-o', '--one-org', action='store_true', default=False, help='Launch populate with one org only') + parser.add_argument('-t', '--three-orgs', action='store_true', default=False, + help='Launch populate with three orgs') 
args = vars(parser.parse_args()) org_0 = 'owkin' org_1 = org_0 if args['one_org'] else 'chunantes' + org_2 = 'clb' if args['three_orgs'] else org_0 print(f'will create datamanager with {org_1}') # create datamanager with org1 @@ -211,7 +215,7 @@ def do_populate(): 'description': os.path.join(dir_path, './fixtures/chunantes/algos/algo3/description.md'), 'permissions': 'all', } - algo_key = get_or_create(data, org_1, 'algo') + algo_key = get_or_create(data, org_2, 'algo') #################################################### From 25dbb8851335a4bafb2f790a1a435275ea102c91 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 20 May 2019 16:00:20 +0200 Subject: [PATCH 101/106] Clean scripts. --- docker/start.py | 2 -- substrabac/populate.py | 26 +++++++++++++++++++------- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/docker/start.py b/docker/start.py index dd938b023..c2a33bd5e 100644 --- a/docker/start.py +++ b/docker/start.py @@ -89,8 +89,6 @@ def generate_docker_compose_file(conf, launch_settings): peer = org['peer']['name'] org_name_stripped = org_name.replace('-', '') - # Dirty port assign - port = BACKEND_PORT[org_name_stripped] cpu_count = os.cpu_count() diff --git a/substrabac/populate.py b/substrabac/populate.py index 19edc7a8a..4f2319a52 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -77,15 +77,27 @@ def do_populate(): setup_config() parser = argparse.ArgumentParser() - parser.add_argument('-o', '--one-org', action='store_true', default=False, - help='Launch populate with one org only') - parser.add_argument('-t', '--three-orgs', action='store_true', default=False, - help='Launch populate with three orgs') + group = parser.add_mutually_exclusive_group() + group.add_argument('-o', '--one-org', action='store_const', dest='nb_org', const=1, + help='Launch populate with one org only') + group.add_argument('-tw', '--two-orgs', action='store_const', dest='nb_org', const=2, + help='Launch populate with two orgs only') + group.add_argument('-th', '--three-orgs', action='store_const', dest='nb_org', const=3, + help='Launch populate with three orgs') + parser.set_defaults(nb_org=2) args = vars(parser.parse_args()) - org_0 = 'owkin' - org_1 = org_0 if args['one_org'] else 'chunantes' - org_2 = 'clb' if args['three_orgs'] else org_0 + if args['nb_org'] == 1: + org_0 = org_1 = org_2 = 'owkin' + elif args['nb_org'] == 2: + org_0 = org_2 = 'owkin' + org_1 = 'chunantes' + elif args['nb_org'] == 3: + org_0 = 'owkin' + org_1 = 'chunantes' + org_2 = 'clb' + else: + raise Exception(f"Number of orgs {args['nb_org']} not in [1, 2, 3]") print(f'will create datamanager with {org_1}') # create datamanager with org1 From e5040d07640d4e0b0c73dd1a0125f2664ca3df55 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 21 May 2019 09:52:56 +0200 Subject: [PATCH 102/106] Remove 'only' from populate. 
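Taken together, the populate.py changes above reduce to the following org-selection pattern; this is a condensed sketch under the assumption that only the role assignment matters here (the real script also creates assets and wires profile URLs):

```python
import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument('-o', '--one-org', action='store_const', dest='nb_org', const=1,
                   help='Launch populate with one org')
group.add_argument('-tw', '--two-orgs', action='store_const', dest='nb_org', const=2,
                   help='Launch populate with two orgs')
group.add_argument('-th', '--three-orgs', action='store_const', dest='nb_org', const=3,
                   help='Launch populate with three orgs')
parser.set_defaults(nb_org=2)
args = parser.parse_args()

# Map the requested org count onto the three roles used by the script.
roles = {1: ('owkin', 'owkin', 'owkin'),
         2: ('owkin', 'chunantes', 'owkin'),
         3: ('owkin', 'chunantes', 'clb')}
org_0, org_1, org_2 = roles[args.nb_org]
```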
--- substrabac/populate.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/substrabac/populate.py b/substrabac/populate.py index 4f2319a52..20cb01706 100644 --- a/substrabac/populate.py +++ b/substrabac/populate.py @@ -79,9 +79,9 @@ def do_populate(): parser = argparse.ArgumentParser() group = parser.add_mutually_exclusive_group() group.add_argument('-o', '--one-org', action='store_const', dest='nb_org', const=1, - help='Launch populate with one org only') + help='Launch populate with one org') group.add_argument('-tw', '--two-orgs', action='store_const', dest='nb_org', const=2, - help='Launch populate with two orgs only') + help='Launch populate with two orgs') group.add_argument('-th', '--three-orgs', action='store_const', dest='nb_org', const=3, help='Launch populate with three orgs') parser.set_defaults(nb_org=2) From 99366758379402f7165a42f47c4a4a761baefe88 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Wed, 22 May 2019 10:23:47 +0200 Subject: [PATCH 103/106] Update peer port use along to new substrabac config file. --- docker/start.py | 12 ++++++++---- substrabac/substrapp/utils.py | 8 ++++++-- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/docker/start.py b/docker/start.py index c2a33bd5e..aa29d0c7e 100644 --- a/docker/start.py +++ b/docker/start.py @@ -107,6 +107,7 @@ def generate_docker_compose_file(conf, launch_settings): 'command': f'/bin/bash -c "while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; yes | python manage.py migrate; {django_server}"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, 'environment': ['DATABASE_HOST=postgresql', + 'SUBSTRABAC_PEER_PORT=internal', f'CELERY_BROKER_URL={CELERY_BROKER_URL}', f'SUBSTRABAC_ORG={org_name}', f'SUBSTRABAC_DEFAULT_PORT={port}', @@ -117,7 +118,7 @@ def generate_docker_compose_file(conf, launch_settings): f"SITE_HOST={os.environ.get('SITE_HOST', 'localhost')}", f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}", + f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['port']['internal']}", f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': ['/substra/medias:/substra/medias', '/substra/servermedias:/substra/servermedias', @@ -140,6 +141,7 @@ def generate_docker_compose_file(conf, launch_settings): 'environment': [f'ORG={org_name_stripped}', f'SUBSTRABAC_ORG={org_name}', f'SUBSTRABAC_DEFAULT_PORT={port}', + 'SUBSTRABAC_PEER_PORT=internal', f'CELERY_BROKER_URL={CELERY_BROKER_URL}', f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', 'PYTHONUNBUFFERED=1', @@ -149,7 +151,7 @@ def generate_docker_compose_file(conf, launch_settings): f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", 'DATABASE_HOST=postgresql', f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}", + f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['port']['internal']}", f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': [f'/substra/conf/{org_name}:/substra/conf/{org_name}', f'{orderer_ca}:{orderer_ca}', @@ -168,6 +170,7 @@ def generate_docker_compose_file(conf, launch_settings): 'environment': [f'ORG={org_name_stripped}', f'SUBSTRABAC_ORG={org_name}', f'SUBSTRABAC_DEFAULT_PORT={port}', + 'SUBSTRABAC_PEER_PORT=internal', f'CELERY_BROKER_URL={CELERY_BROKER_URL}', f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', 
'PYTHONUNBUFFERED=1', @@ -177,7 +180,7 @@ def generate_docker_compose_file(conf, launch_settings): f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", 'DATABASE_HOST=postgresql', f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}", + f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['port']['internal']}", f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': ['/var/run/docker.sock:/var/run/docker.sock', '/substra/medias:/substra/medias', @@ -199,6 +202,7 @@ def generate_docker_compose_file(conf, launch_settings): 'environment': [f'ORG={org_name_stripped}', f'SUBSTRABAC_ORG={org_name}', f'SUBSTRABAC_DEFAULT_PORT={port}', + 'SUBSTRABAC_PEER_PORT=internal', f'CELERY_BROKER_URL={CELERY_BROKER_URL}', f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', 'PYTHONUNBUFFERED=1', @@ -208,7 +212,7 @@ def generate_docker_compose_file(conf, launch_settings): f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", 'DATABASE_HOST=postgresql', f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['docker_port']}", + f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['port']['internal']}", f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], 'volumes': ['/var/run/docker.sock:/var/run/docker.sock', '/substra/medias:/substra/medias', diff --git a/substrabac/substrapp/utils.py b/substrabac/substrapp/utils.py index 9e15cfe4f..5f48345bd 100644 --- a/substrabac/substrapp/utils.py +++ b/substrabac/substrapp/utils.py @@ -40,10 +40,12 @@ def queryLedger(options): core_peer_mspconfigpath = LEDGER['core_peer_mspconfigpath'] peer = LEDGER['peer'] + peer_port = peer["port"][os.environ.get('SUBSTRABAC_PEER_PORT', 'external')] + # update config path for using right core.yaml and override msp config path os.environ['FABRIC_CFG_PATH'] = os.environ.get('FABRIC_CFG_PATH_ENV', peer['docker_core_dir']) os.environ['CORE_PEER_MSPCONFIGPATH'] = os.environ.get('CORE_PEER_MSPCONFIGPATH_ENV', core_peer_mspconfigpath) - os.environ['CORE_PEER_ADDRESS'] = os.environ.get('CORE_PEER_ADDRESS_ENV', f'{peer["host"]}:{peer["port"]}') + os.environ['CORE_PEER_ADDRESS'] = os.environ.get('CORE_PEER_ADDRESS_ENV', f'{peer["host"]}:{peer_port}') print(f'Querying chaincode in the channel \'{channel_name}\' on the peer \'{peer["host"]}\' ...', flush=True) @@ -93,6 +95,8 @@ def invokeLedger(options, sync=False): chaincode_name = LEDGER['chaincode_name'] core_peer_mspconfigpath = LEDGER['core_peer_mspconfigpath'] peer = LEDGER['peer'] + peer_port = peer["port"][os.environ.get('SUBSTRABAC_PEER_PORT', 'external')] + orderer = LEDGER['orderer'] orderer_ca_file = orderer['ca'] peer_key_file = peer['clientKey'] @@ -101,7 +105,7 @@ def invokeLedger(options, sync=False): # update config path for using right core.yaml and override msp config path os.environ['FABRIC_CFG_PATH'] = os.environ.get('FABRIC_CFG_PATH_ENV', peer['docker_core_dir']) os.environ['CORE_PEER_MSPCONFIGPATH'] = os.environ.get('CORE_PEER_MSPCONFIGPATH_ENV', core_peer_mspconfigpath) - os.environ['CORE_PEER_ADDRESS'] = os.environ.get('CORE_PEER_ADDRESS_ENV', f'{peer["host"]}:{peer["port"]}') + os.environ['CORE_PEER_ADDRESS'] = os.environ.get('CORE_PEER_ADDRESS_ENV', f'{peer["host"]}:{peer_port}') print(f'Sending invoke transaction to {peer["host"]} ...', flush=True) From 27b20367c5503e70e489721be068daf71db69a69 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Mon, 20 May 2019 11:53:16 +0200 Subject: [PATCH 104/106] Add real tls 
path and add read-only option for hlf files. We need to do some refactoring on the volume mount. --- docker/start.py | 44 ++++++++++++++++++++++++-------------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/docker/start.py b/docker/start.py index aa29d0c7e..a3d85dbe5 100644 --- a/docker/start.py +++ b/docker/start.py @@ -124,11 +124,12 @@ def generate_docker_compose_file(conf, launch_settings): '/substra/servermedias:/substra/servermedias', '/substra/dryrun:/substra/dryrun', '/substra/static:/usr/src/app/substrabac/statics', - f'/substra/conf/{org_name}:/substra/conf/{org_name}', - f'{orderer_ca}:{orderer_ca}', - f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem', - f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}', - f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}', + f'/substra/conf/{org_name}:/substra/conf/{org_name}:ro', + f'{orderer_ca}:{orderer_ca}:ro', + f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem:ro', + f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}:ro', + f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}:ro', + f'/substra/data/orgs/{org_name}/tls/{peer}:/etc/hyperledger/fabric/tls:ro', ], 'depends_on': ['postgresql', 'rabbit']} @@ -153,11 +154,12 @@ def generate_docker_compose_file(conf, launch_settings): f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['port']['internal']}", f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], - 'volumes': [f'/substra/conf/{org_name}:/substra/conf/{org_name}', - f'{orderer_ca}:{orderer_ca}', - f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem', - f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}', - f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}', + 'volumes': [f'/substra/conf/{org_name}:/substra/conf/{org_name}:ro', + f'{orderer_ca}:{orderer_ca}:ro', + f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem:ro', + f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}:ro', + f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}:ro', + f'/substra/data/orgs/{org_name}/tls/{peer}:/etc/hyperledger/fabric/tls:ro', ], 'depends_on': [f'substrabac{org_name_stripped}', 'postgresql', 'rabbit']} @@ -185,11 +187,12 @@ def generate_docker_compose_file(conf, launch_settings): 'volumes': ['/var/run/docker.sock:/var/run/docker.sock', '/substra/medias:/substra/medias', '/substra/servermedias:/substra/servermedias', - f'/substra/conf/{org_name}:/substra/conf/{org_name}', - f'{orderer_ca}:{orderer_ca}', - f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem', - f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}', - f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}', + f'/substra/conf/{org_name}:/substra/conf/{org_name}:ro', + f'{orderer_ca}:{orderer_ca}:ro', + f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem:ro', + f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}:ro', + f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}:ro', + f'/substra/data/orgs/{org_name}/tls/{peer}:/etc/hyperledger/fabric/tls:ro', ], 'depends_on': [f'substrabac{org_name_stripped}', 'rabbit']} @@ 
-218,11 +221,12 @@ def generate_docker_compose_file(conf, launch_settings): '/substra/medias:/substra/medias', '/substra/servermedias:/substra/servermedias', '/substra/dryrun:/substra/dryrun', - f'/substra/conf/{org_name}:/substra/conf/{org_name}', - f'{orderer_ca}:{orderer_ca}', - f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem', - f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}', - f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}', + f'/substra/conf/{org_name}:/substra/conf/{org_name}:ro', + f'{orderer_ca}:{orderer_ca}:ro', + f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem:ro', + f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}:ro', + f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}:ro', + f'/substra/data/orgs/{org_name}/tls/{peer}:/etc/hyperledger/fabric/tls:ro', ], 'depends_on': [f'substrabac{org_name_stripped}', 'rabbit']} From 67801cf58eb96f3eee7711d136a1c2c317fbfb46 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 21 May 2019 11:22:54 +0200 Subject: [PATCH 105/106] Clean docker start and add core hlf env to overwrite the core.yaml from peer. --- docker/start.py | 145 ++++++++++++++++++------------------------------ 1 file changed, 53 insertions(+), 92 deletions(-) diff --git a/docker/start.py b/docker/start.py index a3d85dbe5..3981a9511 100644 --- a/docker/start.py +++ b/docker/start.py @@ -87,6 +87,8 @@ def generate_docker_compose_file(conf, launch_settings): org_name = org['name'] orderer_ca = org['orderer']['ca'] peer = org['peer']['name'] + tls_peer_dir = f'/substra/data/orgs/{org_name}/tls/{peer}' + org_name_stripped = org_name.replace('-', '') port = BACKEND_PORT[org_name_stripped] @@ -100,37 +102,58 @@ def generate_docker_compose_file(conf, launch_settings): django_server = f'python3 manage.py runserver 0.0.0.0:{port}' + backend_global_env = [ + f'ORG={org_name_stripped}', + f'SUBSTRABAC_ORG={org_name}', + f'SUBSTRABAC_DEFAULT_PORT={port}', + 'SUBSTRABAC_PEER_PORT=internal', + + 'PYTHONUNBUFFERED=1', + 'DATABASE_HOST=postgresql', + + f'CELERY_BROKER_URL={CELERY_BROKER_URL}', + f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', + + # Basic auth + f"BACK_AUTH_USER={os.environ.get('BACK_AUTH_USER', '')}", + f"BACK_AUTH_PASSWORD={os.environ.get('BACK_AUTH_PASSWORD', '')}", + f"SITE_HOST={os.environ.get('SITE_HOST', 'localhost')}", + f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", + + # HLF overwrite config from core.yaml + f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", + f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}", + f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['port']['internal']}", + f"CORE_PEER_MSPCONFIGPATH={org['core_peer_mspconfigpath']}", + f"CORE_PEER_TLS_CERT_FILE={tls_peer_dir}/server/server.crt", + f"CORE_PEER_TLS_KEY_FILE={tls_peer_dir}/server/server.key", + f"CORE_PEER_TLS_ROOTCERT_FILE={tls_peer_dir}/server/server.pem", + f"CORE_PEER_TLS_CLIENTCERT_FILE={tls_peer_dir}/client/client.crt", + f"CORE_PEER_TLS_CLIENTKEY_FILE={tls_peer_dir}/client/client.key", + f"CORE_PEER_TLS_CLIENTROOTCAS_FILES={tls_peer_dir}/client/client.pem", + ] + + hlf_volumes = [ + # config (core.yaml + substrabac/conf.json) + f'/substra/conf/{org_name}:/substra/conf/{org_name}:ro', + + # HLF files + f'{orderer_ca}:{orderer_ca}:ro', + f'{tls_peer_dir}:{tls_peer_dir}:ro', + f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}:ro', + ] + backend = 
{'container_name': f'{org_name_stripped}.substrabac', 'image': 'substra/substrabac', 'restart': 'unless-stopped', 'ports': [f'{port}:{port}'], 'command': f'/bin/bash -c "while ! {{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; yes | python manage.py migrate; {django_server}"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, - 'environment': ['DATABASE_HOST=postgresql', - 'SUBSTRABAC_PEER_PORT=internal', - f'CELERY_BROKER_URL={CELERY_BROKER_URL}', - f'SUBSTRABAC_ORG={org_name}', - f'SUBSTRABAC_DEFAULT_PORT={port}', - f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', - 'PYTHONUNBUFFERED=1', - f"BACK_AUTH_USER={os.environ.get('BACK_AUTH_USER', '')}", - f"BACK_AUTH_PASSWORD={os.environ.get('BACK_AUTH_PASSWORD', '')}", - f"SITE_HOST={os.environ.get('SITE_HOST', 'localhost')}", - f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", - f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['port']['internal']}", - f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], + 'environment': backend_global_env.copy(), 'volumes': ['/substra/medias:/substra/medias', - '/substra/servermedias:/substra/servermedias', '/substra/dryrun:/substra/dryrun', - '/substra/static:/usr/src/app/substrabac/statics', - f'/substra/conf/{org_name}:/substra/conf/{org_name}:ro', - f'{orderer_ca}:{orderer_ca}:ro', - f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem:ro', - f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}:ro', - f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}:ro', - f'/substra/data/orgs/{org_name}/tls/{peer}:/etc/hyperledger/fabric/tls:ro', - ], + '/substra/servermedias:/substra/servermedias', + '/substra/static:/usr/src/app/substrabac/statics'] + hlf_volumes, 'depends_on': ['postgresql', 'rabbit']} scheduler = {'container_name': f'{org_name_stripped}.scheduler', @@ -139,28 +162,8 @@ def generate_docker_compose_file(conf, launch_settings): 'restart': 'unless-stopped', 'command': f'/bin/bash -c "while ! {{ nc -z rabbit 5672 2>&1; }}; do sleep 1; done; while ! 
{{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; celery -A substrabac worker -l info -n {org_name_stripped} -Q {org_name},scheduler,celery --hostname {org_name}.scheduler"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, - 'environment': [f'ORG={org_name_stripped}', - f'SUBSTRABAC_ORG={org_name}', - f'SUBSTRABAC_DEFAULT_PORT={port}', - 'SUBSTRABAC_PEER_PORT=internal', - f'CELERY_BROKER_URL={CELERY_BROKER_URL}', - f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', - 'PYTHONUNBUFFERED=1', - f"BACK_AUTH_USER={os.environ.get('BACK_AUTH_USER', '')}", - f"BACK_AUTH_PASSWORD={os.environ.get('BACK_AUTH_PASSWORD', '')}", - f"SITE_HOST={os.environ.get('SITE_HOST', 'localhost')}", - f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", - 'DATABASE_HOST=postgresql', - f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['port']['internal']}", - f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], - 'volumes': [f'/substra/conf/{org_name}:/substra/conf/{org_name}:ro', - f'{orderer_ca}:{orderer_ca}:ro', - f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem:ro', - f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}:ro', - f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}:ro', - f'/substra/data/orgs/{org_name}/tls/{peer}:/etc/hyperledger/fabric/tls:ro', - ], + 'environment': backend_global_env.copy(), + 'volumes': hlf_volumes, 'depends_on': [f'substrabac{org_name_stripped}', 'postgresql', 'rabbit']} worker = {'container_name': f'{org_name_stripped}.worker', @@ -169,31 +172,10 @@ def generate_docker_compose_file(conf, launch_settings): 'restart': 'unless-stopped', 'command': f'/bin/bash -c "while ! {{ nc -z rabbit 5672 2>&1; }}; do sleep 1; done; while ! 
{{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; celery -A substrabac worker -l info -n {org_name_stripped} -Q {org_name},{org_name}.worker,celery --hostname {org_name}.worker"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, - 'environment': [f'ORG={org_name_stripped}', - f'SUBSTRABAC_ORG={org_name}', - f'SUBSTRABAC_DEFAULT_PORT={port}', - 'SUBSTRABAC_PEER_PORT=internal', - f'CELERY_BROKER_URL={CELERY_BROKER_URL}', - f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', - 'PYTHONUNBUFFERED=1', - f"BACK_AUTH_USER={os.environ.get('BACK_AUTH_USER', '')}", - f"BACK_AUTH_PASSWORD={os.environ.get('BACK_AUTH_PASSWORD', '')}", - f"SITE_HOST={os.environ.get('SITE_HOST', 'localhost')}", - f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", - 'DATABASE_HOST=postgresql', - f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['port']['internal']}", - f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], + 'environment': backend_global_env.copy(), 'volumes': ['/var/run/docker.sock:/var/run/docker.sock', '/substra/medias:/substra/medias', - '/substra/servermedias:/substra/servermedias', - f'/substra/conf/{org_name}:/substra/conf/{org_name}:ro', - f'{orderer_ca}:{orderer_ca}:ro', - f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem:ro', - f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}:ro', - f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}:ro', - f'/substra/data/orgs/{org_name}/tls/{peer}:/etc/hyperledger/fabric/tls:ro', - ], + '/substra/servermedias:/substra/servermedias'] + hlf_volumes, 'depends_on': [f'substrabac{org_name_stripped}', 'rabbit']} dryrunner = {'container_name': f'{org_name_stripped}.dryrunner', @@ -202,32 +184,11 @@ def generate_docker_compose_file(conf, launch_settings): 'restart': 'unless-stopped', 'command': f'/bin/bash -c "while ! {{ nc -z rabbit 5672 2>&1; }}; do sleep 1; done; while ! 
{{ nc -z postgresql 5432 2>&1; }}; do sleep 1; done; celery -A substrabac worker -l info -n {org_name_stripped} -Q {org_name},{org_name}.dryrunner,celery --hostname {org_name}.dryrunner"', 'logging': {'driver': 'json-file', 'options': {'max-size': '20m', 'max-file': '5'}}, - 'environment': [f'ORG={org_name_stripped}', - f'SUBSTRABAC_ORG={org_name}', - f'SUBSTRABAC_DEFAULT_PORT={port}', - 'SUBSTRABAC_PEER_PORT=internal', - f'CELERY_BROKER_URL={CELERY_BROKER_URL}', - f'DJANGO_SETTINGS_MODULE=substrabac.settings.{launch_settings}', - 'PYTHONUNBUFFERED=1', - f"BACK_AUTH_USER={os.environ.get('BACK_AUTH_USER', '')}", - f"BACK_AUTH_PASSWORD={os.environ.get('BACK_AUTH_PASSWORD', '')}", - f"SITE_HOST={os.environ.get('SITE_HOST', 'localhost')}", - f"SITE_PORT={os.environ.get('BACK_PORT', 9000)}", - 'DATABASE_HOST=postgresql', - f"FABRIC_CFG_PATH_ENV={org['peer']['docker_core_dir']}", - f"CORE_PEER_ADDRESS_ENV={org['peer']['host']}:{org['peer']['port']['internal']}", - f"FABRIC_LOGGING_SPEC={FABRIC_LOGGING_SPEC}"], + 'environment': backend_global_env.copy(), 'volumes': ['/var/run/docker.sock:/var/run/docker.sock', - '/substra/medias:/substra/medias', - '/substra/servermedias:/substra/servermedias', '/substra/dryrun:/substra/dryrun', - f'/substra/conf/{org_name}:/substra/conf/{org_name}:ro', - f'{orderer_ca}:{orderer_ca}:ro', - f'/substra/data/orgs/{org_name}/ca-cert.pem:/substra/data/orgs/{org_name}/ca-cert.pem:ro', - f'{org["core_peer_mspconfigpath"]}:{org["core_peer_mspconfigpath"]}:ro', - f'/substra/data/orgs/{org_name}/tls/{peer}:/substra/data/orgs/{org_name}/tls/{peer}:ro', - f'/substra/data/orgs/{org_name}/tls/{peer}:/etc/hyperledger/fabric/tls:ro', - ], + '/substra/medias:/substra/medias', + '/substra/servermedias:/substra/servermedias'] + hlf_volumes, 'depends_on': [f'substrabac{org_name_stripped}', 'rabbit']} # Check if we have nvidia docker From 5e67268037c2607213e3cdda94d38df3666e52e8 Mon Sep 17 00:00:00 2001 From: Kelvin Moutet Date: Tue, 21 May 2019 16:44:25 +0200 Subject: [PATCH 106/106] Improve docker ps --- docker/start.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/start.py b/docker/start.py index 3981a9511..998381faa 100644 --- a/docker/start.py +++ b/docker/start.py @@ -264,7 +264,7 @@ def start(conf, launch_settings, no_backup): print('start docker-compose', flush=True) call(['docker-compose', '-f', docker_compose['path'], '--project-directory', os.path.join(dir_path, '../'), 'up', '-d', '--remove-orphans', '--build']) - call(['docker', 'ps', '-a']) + call(['docker', 'ps', '-a', '--format', 'table {{.ID}}\t{{.Names}}\t{{.Status}}\t{{.Ports}}']) if __name__ == "__main__":