From c522f656eaee6e8a7231e63d5ac7df9cf5c45aed Mon Sep 17 00:00:00 2001
From: Gosuto Inzasheru
Date: Tue, 24 Sep 2024 17:11:30 +0200
Subject: [PATCH 1/6] chore: make compatible with new bal tools release

---
 bal_addresses/requirements.txt |  2 +-
 gen_core_pools.py              |  5 ++++-
 gen_pools_and_gauges.py        | 21 +++++++++------------
 setup.py                       |  2 +-
 4 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/bal_addresses/requirements.txt b/bal_addresses/requirements.txt
index 09bc4e43..3c80d7e2 100644
--- a/bal_addresses/requirements.txt
+++ b/bal_addresses/requirements.txt
@@ -1,5 +1,5 @@
 pathlib>=1.0
-git+https://github.com/BalancerMaxis/bal_tools@v0.1.7
+git+https://github.com/BalancerMaxis/bal_tools.git@e63151cc5a61455e9dd87a39f3760c926cbeec0a
 requests
 pandas
 web3
diff --git a/gen_core_pools.py b/gen_core_pools.py
index 1c98e461..033858c7 100644
--- a/gen_core_pools.py
+++ b/gen_core_pools.py
@@ -13,7 +13,10 @@ def main():
 
     # dump the collected dict to json file
     with open("outputs/core_pools.json", "w") as f:
-        json.dump(core_pools, f, indent=2)
+        core_pools_dict = {}
+        for chain in core_pools:
+            core_pools_dict[chain] = core_pools[chain].pools
+        json.dump(core_pools_dict, f, indent=2)
 
 
 if __name__ == "__main__":
diff --git a/gen_pools_and_gauges.py b/gen_pools_and_gauges.py
index 67cd6ead..c9e908f3 100644
--- a/gen_pools_and_gauges.py
+++ b/gen_pools_and_gauges.py
@@ -31,25 +31,25 @@ def query_swap_enabled_pools(chain, skip=0, step_size=100) -> list:
     return result
 
 
-def process_query_swap_enabled_pools(result) -> dict:
+def process_query_pools(result) -> dict:
     df = pd.DataFrame(result)
     if len(df) == 0:
         return
     # assert no duplicate addresses exist
-    assert len(df["address"].unique()) == len(df)
+    assert len(df["id"].unique()) == len(df)
     # solve issue of duplicate gauge symbols
-    df["symbol"] = df["symbol"] + "-" + df["address"].str[2:6]
+    df["symbol"] = df["symbol"] + "-" + df["id"].str[2:6]
     # confirm no duplicate symbols exist, raise if so
     if len(df["symbol"].unique()) != len(df):
         print("Found duplicate symbols!")
         print(df[df["symbol"].duplicated(keep=False)].sort_values("symbol"))
         raise
-    return df.set_index("symbol")["address"].to_dict()
+    return df.set_index("symbol")["id"].to_dict()
 
 
-def process_query_preferential_gauges(result) -> dict:
+def process_query_gauges(result) -> dict:
     df = pd.DataFrame(result)
     if len(df) == 0:
         return
@@ -110,21 +110,18 @@ def main():
         chains = json.load(f)
     for chain in chains["BALANCER_PRODUCTION_CHAINS"]:
         print(f"Generating pools and gauges for {chain}...")
-        gauge_info = BalPoolsGauges(chain)
+        pool_gauge_info = BalPoolsGauges(chain)
         # pools
-        # TODO: consider moving to query object??
-        result = process_query_swap_enabled_pools(query_swap_enabled_pools(chain))
+        result = process_query_pools(query_swap_enabled_pools(chain))
         if result:
             pools[chain] = result
         # gauges
-        result = process_query_preferential_gauges(
-            gauge_info.query_preferential_gauges()
-        )
+        result = process_query_gauges(pool_gauge_info.query_preferential_gauges())
        if result:
             gauges[chain] = result
         # cache mainnet BalPoolsGauges
         if chain == "mainnet":
-            gauge_info_mainnet = gauge_info
+            gauge_info_mainnet = pool_gauge_info
 
     # root gauges; only on mainnet
     result = process_query_root_gauges(gauge_info_mainnet.query_root_gauges(), gauges)
diff --git a/setup.py b/setup.py
index 36a5b1f6..afff6818 100644
--- a/setup.py
+++ b/setup.py
@@ -23,7 +23,7 @@
         "web3",
         "gql[requests]",
         "requests",
-        "bal_tools @ git+https://github.com/BalancerMaxis/bal_tools@v0.1.7",
+        "bal_tools @ git+https://github.com/BalancerMaxis/bal_tools.git@e63151cc5a61455e9dd87a39f3760c926cbeec0a",
     ],
     keywords=["python", "first package"],
     classifiers=[
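Note on the gen_core_pools.py change in PATCH 1: json.dump raises a TypeError when handed an object it cannot serialise, which is why the patch unwraps each chain's result into a plain dict before dumping. A minimal sketch of that workaround, assuming (as the diff implies) that each per-chain object exposes its data as a .pools dict; the CorePools class and the sample pool entry here are hypothetical stand-ins, not the bal_tools implementation:

    import json


    class CorePools:
        """Hypothetical stand-in for the per-chain object bal_tools returns."""

        def __init__(self, pools):
            self.pools = pools


    core_pools = {"mainnet": CorePools({"0x1234abcd": "B-50WETH-50DAI"})}

    # json.dumps(core_pools) would raise TypeError (CorePools is not serialisable),
    # so unwrap every chain into its plain .pools dict first, as the patch does
    core_pools_dict = {}
    for chain in core_pools:
        core_pools_dict[chain] = core_pools[chain].pools
    print(json.dumps(core_pools_dict, indent=2))
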
From 34caf3fcfeb85e6cd4043c421dcef234a6ccd738 Mon Sep 17 00:00:00 2001
From: Gosuto Inzasheru
Date: Tue, 24 Sep 2024 17:11:52 +0200
Subject: [PATCH 2/6] chore: add black linter to dev env

---
 bal_addresses/requirements-dev.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/bal_addresses/requirements-dev.txt b/bal_addresses/requirements-dev.txt
index c3b82fe9..5c5b7ab2 100644
--- a/bal_addresses/requirements-dev.txt
+++ b/bal_addresses/requirements-dev.txt
@@ -2,3 +2,4 @@ pytest-mock
 responses
 pytest-cov
 pytest==7.4.0
+black==22.10

From 6d6cb39fb69f8b4e9612bc64cd37d058645c3886 Mon Sep 17 00:00:00 2001
From: Gosuto Inzasheru
Date: Wed, 25 Sep 2024 12:12:19 +0200
Subject: [PATCH 3/6] style: rename to match rest of generation scripts

---
 .github/workflows/generate_permissions.yaml                   | 2 +-
 generate_current_permissions.py => gen_current_permissions.py | 0
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename generate_current_permissions.py => gen_current_permissions.py (100%)

diff --git a/.github/workflows/generate_permissions.yaml b/.github/workflows/generate_permissions.yaml
index 87d10115..48508937 100644
--- a/.github/workflows/generate_permissions.yaml
+++ b/.github/workflows/generate_permissions.yaml
@@ -25,7 +25,7 @@ jobs:
         id: update
         run: |
           pip3 install -r bal_addresses/requirements.txt
-          python3 generate_current_permissions.py
+          python3 gen_current_permissions.py
           git add -A
 
       - name: pull-request
diff --git a/generate_current_permissions.py b/gen_current_permissions.py
similarity index 100%
rename from generate_current_permissions.py
rename to gen_current_permissions.py

From 0ea795d9c1c01dc36d24f72666a252d304bea9a9 Mon Sep 17 00:00:00 2001
From: Gosuto Inzasheru
Date: Wed, 25 Sep 2024 13:03:02 +0200
Subject: [PATCH 4/6] refactor: core pools object can be serialised to json

---
 gen_core_pools.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/gen_core_pools.py b/gen_core_pools.py
index 033858c7..1c98e461 100644
--- a/gen_core_pools.py
+++ b/gen_core_pools.py
@@ -13,10 +13,7 @@ def main():
 
     # dump the collected dict to json file
     with open("outputs/core_pools.json", "w") as f:
-        core_pools_dict = {}
-        for chain in core_pools:
-            core_pools_dict[chain] = core_pools[chain].pools
-        json.dump(core_pools_dict, f, indent=2)
+        json.dump(core_pools, f, indent=2)
 
 
 if __name__ == "__main__":

From c43752bf33756e28a179e1136220aec1d21ada94 Mon Sep 17 00:00:00 2001
From: Gosuto Inzasheru
Date: Wed, 25 Sep 2024 13:12:20 +0200
Subject: [PATCH 5/6] chore: add json-fix dep

---
 bal_addresses/requirements.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/bal_addresses/requirements.txt b/bal_addresses/requirements.txt
index 3c80d7e2..714be886 100644
--- a/bal_addresses/requirements.txt
+++ b/bal_addresses/requirements.txt
@@ -6,3 +6,4 @@ web3
 dotmap
 munch==4.0.0
 gql[requests]
+json-fix
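Note on PATCH 4 and PATCH 5: they belong together. The per-chain unwrapping from PATCH 1 is reverted because the core pools object can now serialise itself, with the json-fix dependency supplying the hook. A hedged sketch of that mechanism (json-fix appears to make the json module fall back to a __json__ method on otherwise unserialisable objects; CorePools is again a hypothetical stand-in, not the actual bal_tools class):

    import json

    import json_fix  # noqa: F401 -- importing this patches the json module


    class CorePools:
        """Hypothetical stand-in; bal_tools presumably defines a similar hook."""

        def __init__(self, pools):
            self.pools = pools

        def __json__(self):
            # with json-fix imported, json.dumps falls back to this method
            return self.pools


    print(json.dumps({"mainnet": CorePools({"0x1234abcd": "B-50WETH-50DAI"})}, indent=2))
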
From 679fdcf3b01886eda7d8c65ed0b6623a6755e88e Mon Sep 17 00:00:00 2001
From: Gosuto Inzasheru
Date: Wed, 2 Oct 2024 15:04:52 +0200
Subject: [PATCH 6/6] fix: make compatible with bal_tools v0.1.10

---
 gen_pools_and_gauges.py | 49 ++++++++++++-----------------------------
 1 file changed, 14 insertions(+), 35 deletions(-)

diff --git a/gen_pools_and_gauges.py b/gen_pools_and_gauges.py
index c9e908f3..20192d0c 100644
--- a/gen_pools_and_gauges.py
+++ b/gen_pools_and_gauges.py
@@ -1,52 +1,31 @@
 import json
 
 import pandas as pd
-import requests
 from bal_tools import BalPoolsGauges
-from bal_tools import Subgraph
-
-
-def query_swap_enabled_pools(chain, skip=0, step_size=100) -> list:
-    url = Subgraph(chain).get_subgraph_url("core")
-    query = f"""{{
-        pools(
-            skip: {skip}
-            first: {step_size}
-            where: {{swapEnabled: true}}
-        ) {{
-            address
-            symbol
-        }}
-    }}"""
-    r = requests.post(url, json={"query": query})
-    r.raise_for_status()
-    try:
-        result = r.json()["data"]["pools"]
-    except KeyError:
-        result = []
-    if len(result) > 0:
-        # didnt reach end of results yet, collect next page
-        result += query_swap_enabled_pools(chain, skip + step_size, step_size)
-    return result
 
 
 def process_query_pools(result) -> dict:
-    df = pd.DataFrame(result)
+    flattened_result = []
+    for pool_data in result:
+        flattened_result.append(
+            {"address": pool_data.address, "symbol": pool_data.symbol}
+        )
+    df = pd.DataFrame(flattened_result)
     if len(df) == 0:
         return
     # assert no duplicate addresses exist
-    assert len(df["id"].unique()) == len(df)
+    assert len(df["address"].unique()) == len(df)
     # solve issue of duplicate gauge symbols
-    df["symbol"] = df["symbol"] + "-" + df["id"].str[2:6]
+    df["symbol"] = df["symbol"] + "-" + df["address"].str[2:6]
     # confirm no duplicate symbols exist, raise if so
     if len(df["symbol"].unique()) != len(df):
         print("Found duplicate symbols!")
         print(df[df["symbol"].duplicated(keep=False)].sort_values("symbol"))
         raise
-    return df.set_index("symbol")["id"].to_dict()
+    return df.sort_values("address").set_index("symbol")["address"].to_dict()
 
 
 def process_query_gauges(result) -> dict:
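Note on the hunk above: process_query_pools now receives attribute-style records from bal_tools instead of raw subgraph dicts, flattens them into a DataFrame, and keeps the address-derived suffix that disambiguates duplicate symbols. A small illustration of that suffix logic, with namedtuples standing in for the bal_tools pool type (an assumption; the real type only needs .address and .symbol attributes):

    from collections import namedtuple

    import pandas as pd

    Pool = namedtuple("Pool", ["address", "symbol"])  # stand-in for the bal_tools type

    result = [
        Pool("0xaaaa1111", "B-50WETH-50DAI"),
        Pool("0xbbbb2222", "B-50WETH-50DAI"),  # same symbol, different pool
    ]
    df = pd.DataFrame([{"address": p.address, "symbol": p.symbol} for p in result])
    # append 4 hex chars of the address so both pools keep a unique key
    df["symbol"] = df["symbol"] + "-" + df["address"].str[2:6]
    print(df.sort_values("address").set_index("symbol")["address"].to_dict())
    # {'B-50WETH-50DAI-aaaa': '0xaaaa1111', 'B-50WETH-50DAI-bbbb': '0xbbbb2222'}
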
@@ -54,17 +33,17 @@
     df = pd.DataFrame(result)
     if len(df) == 0:
         return
     # assert no duplicate addresses exist
-    assert len(df["id"].unique()) == len(df)
+    assert len(df["address"].unique()) == len(df)
     # solve issue of duplicate gauge symbols
-    df["symbol"] = df["symbol"] + "-" + df["id"].str[2:6]
+    df["symbol"] = df["symbol"] + "-" + df["address"].str[2:6]
     # confirm no duplicate symbols exist, raise if so
     if len(df["symbol"].unique()) != len(df):
         print("Found duplicate symbols!")
         print(df[df["symbol"].duplicated(keep=False)].sort_values("symbol"))
         raise
-    return df.set_index("symbol")["id"].to_dict()
+    return df.sort_values("address").set_index("symbol")["address"].to_dict()
 
 
 def process_query_root_gauges(result, gauges) -> dict:
@@ -112,11 +91,11 @@ def main():
         print(f"Generating pools and gauges for {chain}...")
         pool_gauge_info = BalPoolsGauges(chain)
         # pools
-        result = process_query_pools(query_swap_enabled_pools(chain))
+        result = process_query_pools(pool_gauge_info.query_all_pools())
         if result:
             pools[chain] = result
         # gauges
-        result = process_query_gauges(pool_gauge_info.query_preferential_gauges())
+        result = process_query_gauges(pool_gauge_info.query_all_gauges())
         if result:
             gauges[chain] = result
         # cache mainnet BalPoolsGauges
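Note on the main() hunk: after PATCH 6 every chain is served by a single BalPoolsGauges instance, and the subgraph pagination that query_swap_enabled_pools used to do by hand now lives inside bal_tools. A condensed sketch of the resulting flow, assuming the process_query_* helpers defined earlier in this file are in scope and using the query_all_pools/query_all_gauges method names the diff shows:

    import json

    from bal_tools import BalPoolsGauges

    pools, gauges = {}, {}
    for chain in ["mainnet", "gnosis"]:  # illustrative subset of the production chains
        pool_gauge_info = BalPoolsGauges(chain)
        result = process_query_pools(pool_gauge_info.query_all_pools())
        if result:
            pools[chain] = result
        result = process_query_gauges(pool_gauge_info.query_all_gauges())
        if result:
            gauges[chain] = result

    print(json.dumps(pools, indent=2))  # {chain: {symbol: address}, ...}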