diff --git a/README.md b/README.md
index 973250bb..e668cc08 100644
--- a/README.md
+++ b/README.md
@@ -376,6 +376,8 @@ pm2 start neurons/validator.py \
> NOTE: to access the wandb UI to get statistics about the miners, you can click on this [link](https://wandb.ai/eclipsevortext/subvortex-team) and choose the validator run you want.
+> NOTE: by default, the dumps created by the auto-update are stored in `/etc/redis`. If you want to change the location, use `--database.redis_dump_path`.
+
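+For example, a minimal sketch of passing the flag (assuming the same pm2 invocation as above; keep any other arguments you normally use, and note that `/var/tmp/redis-dumps` is only an illustrative directory):
+
+```bash
+pm2 start neurons/validator.py \
+  --name validator-7 \
+  --interpreter python3 -- \
+  --netuid 7 \
+  --wallet.name $WALLET_NAME \
+  --wallet.hotkey $HOTKEY_NAME \
+  --logging.debug \
+  --auto-update \
+  --database.redis_dump_path /var/tmp/redis-dumps
+```
+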
## Releases
- [Release-2.2.0](./scripts/release/release-2.2.0/RELEASE-2.2.0.md)
diff --git a/neurons/validator.py b/neurons/validator.py
index e64f45f2..47942165 100644
--- a/neurons/validator.py
+++ b/neurons/validator.py
@@ -193,7 +193,8 @@ async def run(self):
bt.logging.info("run()")
# Init version control
- self.version_control = VersionControl(self.database)
+ dump_path = self.config.database.redis_dump_path
+ self.version_control = VersionControl(self.database, dump_path)
# Init miners
self.miners = await get_all_miners(self)
@@ -208,6 +209,17 @@ async def run(self):
try:
while 1:
+ # Start the upgrade process every 5 minutes
+ if should_upgrade(self.config.auto_update, self.last_upgrade_check):
+ bt.logging.debug("Checking upgrade")
+ must_restart = await self.version_control.upgrade()
+ if must_restart:
+ finish_wandb()
+ self.version_control.restart()
+ return
+
+ self.last_upgrade_check = time.time()
+
start_epoch = time.time()
await resync_metagraph_and_miners(self)
@@ -266,17 +278,6 @@ async def run_forward():
prev_set_weights_block = get_current_block(self.subtensor)
save_state(self)
- # Start the upgrade process every 10 minutes
- if should_upgrade(self.config.auto_update, self.last_upgrade_check):
- bt.logging.debug("Checking upgrade")
- must_restart = await self.version_control.upgrade()
- if must_restart:
- finish_wandb()
- self.version_control.restart()
- return
-
- self.last_upgrade_check = time.time()
-
# Rollover wandb to a new run.
if should_reinit_wandb(self):
bt.logging.info("Reinitializing wandb")
diff --git a/scripts/redis/README.md b/scripts/redis/README.md
index e401c9cf..130f9157 100644
--- a/scripts/redis/README.md
+++ b/scripts/redis/README.md
@@ -296,7 +296,7 @@ To create a Redis dump manually, you can use the python script `redis_dump.py`.
For example, if you want to create the dump in the `subVortex` directory, you can run
```
-python3 ./scripts/redis/utils/redis_dump.py --run-type create
+python3 ./scripts/redis/utils/redis_dump.py --run-type create --dump-path redis-dump-2.0.0.json
```
If you want to create the dump in another location and/or name, you can use the argument `--dump-path`
@@ -312,11 +312,11 @@ To restore a Redis dump manually, you can use the python script `redis_dump.py`.
For example, if you want to restore a dump located in the `subVortex` directory, you can run
```
-python3 ./scripts/redis/utils/redis_dump.py --run-type restore
+python3 ./scripts/redis/utils/redis_dump.py --run-type restore --dump-path redis-dump-2.0.0.json
```
If you want to restore a dump in another location, you can use the argument `--dump-path`
```
-python3 ./scripts/redis/utils/redis_dump.py --run-type restore --dump-path /tmp/redis
+python3 ./scripts/redis/utils/redis_dump.py --run-type restore --dump-path /tmp/redis/redis-dump-2.0.0.json
```
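+
+If you want to restore one of the dumps created by the auto-update, point `--dump-path` at the file inside the dump directory (default `/etc/redis`, configurable via `--database.redis_dump_path`). A minimal sketch, with a hypothetical dump file name:
+
+```bash
+# The file name below is illustrative; list the dump directory to find the actual dump
+python3 ./scripts/redis/utils/redis_dump.py --run-type restore --dump-path /etc/redis/redis-dump-2.2.0.json
+```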
diff --git a/scripts/redis/utils/redis_dump.py b/scripts/redis/utils/redis_dump.py
index dc7da88f..71a2d3ad 100644
--- a/scripts/redis/utils/redis_dump.py
+++ b/scripts/redis/utils/redis_dump.py
@@ -71,7 +71,7 @@ async def main(args):
parser.add_argument(
"--dump-path",
type=str,
- default="",
+ default="/tmp/redis",
help="Dump file (with path) to create or restore",
)
parser.add_argument(
diff --git a/scripts/release/release-2.2.3/RELEASE-2.2.3.md b/scripts/release/release-2.2.3/RELEASE-2.2.3.md
index 6a446171..8142681f 100644
--- a/scripts/release/release-2.2.3/RELEASE-2.2.3.md
+++ b/scripts/release/release-2.2.3/RELEASE-2.2.3.md
@@ -24,11 +24,13 @@ Previous Release: 2.2.2
## Rollout Process
1. **Upgrade Subnet**: Fetch the remote tags
+
```bash
git fetch --tags --force
```
Then, checkout the new release tag
+
```bash
git checkout tags/v2.2.3
```
@@ -40,13 +42,38 @@ Previous Release: 2.2.2
pip install -e .
```
-2. **Restart validator**: Restart your validator to take the new version
+2. **Delete validator**: Remove your validator
```bash
- pm2 restart validator-7
+ pm2 delete validator-7
```
-3. **Check logs**: Check the validator logs to see if you see some `New Block`
+ Use `pm2 show validator-7` to get the list of arguments you were using, so you can restore them in step 3.
+
+3. **Start validator in auto-update mode**: Start the validator by running the following command from the **SubVortex** directory
+
+ ```bash
+ pm2 start neurons/validator.py -f \
+ --name validator-7 \
+ --interpreter python3 -- \
+ --netuid 7 \
+ --wallet.name $WALLET_NAME \
+ --wallet.hotkey $HOTKEY_NAME \
+ --subtensor.chain_endpoint ws://$IP:9944 \
+ --logging.debug \
+ --auto-update
+ ```
+
+ Replace **$WALLET_NAME**, **$HOTKEY_NAME** and **$IP** with the expected values.
+ If you were using other arguments, add them back as well.
+
+ > IMPORTANT
+ > Do not forget to provide the `--auto-update` argument.
+
+ > IMPORTANT
+ > Keep wandb enabled with its default settings, as it allows the SubVortex team to monitor validator versions and take action if necessary.
+
+4. **Check logs**: Check the validator logs to confirm you see `New Block` entries
```bash
pm2 logs validator-7
```
@@ -58,6 +85,7 @@ Previous Release: 2.2.2
If any issues arise during or after the rollout, follow these steps to perform a rollback:
1. **Downgrade Subnet**: Checkout the previous release tag
+
```bash
git checkout tags/v2.2.2
```
@@ -69,13 +97,34 @@ If any issues arise during or after the rollout, follow these steps to perform a
pip install -e .
```
-2. **Restart validator**: Restart your validator to take the new version
+2. **Delete validator**: Remove your validator
```bash
- pm2 restart validator-7
+ pm2 delete validator-7
```
-3. **Check logs**: Check the validator logs to see if you see some `New Block`
+ Use `pm2 show validator-7` to get the list of arguments you were using, so you can restore them in step 3.
+
+3. **Start validator**: Start the validator by running the following command from the **SubVortex** directory
+
+ ```bash
+ pm2 start neurons/validator.py -f \
+ --name validator-7 \
+ --interpreter python3 -- \
+ --netuid 7 \
+ --wallet.name $WALLET_NAME \
+ --wallet.hotkey $HOTKEY_NAME \
+ --subtensor.chain_endpoint ws://$IP:9944 \
+ --logging.debug
+ ```
+
+ Replace **$WALLET_NAME**, **$HOTKEY_NAME** and **$IP** with the expected values.
+ If you were using other arguments, add them back as well.
+
+ > IMPORTANT
+ > Do not forget to remove the `--auto-update` argument.
+
+4. **Check logs**: Check the validator logs to confirm you see `New Block` entries
```bash
pm2 logs validator-7
```
@@ -87,11 +136,13 @@ If any issues arise during or after the rollout, follow these steps to perform a
## Rollout Process
1. **Upgrade Subnet**: Fetch the remote tags
+
```bash
git fetch --tags --force
```
Then, checkout the new release tag
+
```bash
git checkout tags/v2.2.3
```
@@ -103,13 +154,34 @@ If any issues arise during or after the rollout, follow these steps to perform a
pip install -e .
```
-2. **Restart validator**: Restart your validator to take the new version
+2. **Delete miner**: Remove your miner
```bash
- pm2 restart miner-7
+ pm2 delete miner-7
```
-3. **Check logs**: Check the validator logs to see if you see some `New Block`
+ Use `pm2 show miner-7` to get the list of arguments you were using, so you can restore them in step 3.
+
+3. **Start miner in auto-update mode**: Start the miner by running the following command from the **SubVortex** directory
+
+ ```bash
+ pm2 start neurons/miner.py -f \
+ --name miner-7 \
+ --interpreter python3 -- \
+ --netuid 7 \
+ --wallet.name $WALLET_NAME \
+ --wallet.hotkey $HOTKEY_NAME \
+ --logging.debug \
+ --auto-update
+ ```
+
+ Replace **$WALLET_NAME** and **$HOTKEY_NAME** with the expected values.
+ If you were using other arguments, add them back as well.
+
+ > IMPORTANT
+ > Do not forget to provide the `--auto-update` argument.
+
+4. **Check logs**: Check the miner logs to confirm you see `New Block` entries
```bash
pm2 logs miner-7
```
@@ -117,6 +189,7 @@ If any issues arise during or after the rollout, follow these steps to perform a
## Rollback Process
1. **Downgrade Subnet**: Checkout the previous release tag
+
```bash
git checkout tags/v2.2.2
```
@@ -128,13 +201,33 @@ If any issues arise during or after the rollout, follow these steps to perform a
pip install -e .
```
-2. **Restart validator**: Restart your validator to take the new version
+2. **Delete miner**: Remove your miner
+
+ ```bash
+ pm2 delete miner-7
+ ```
+
+ Use `pm2 show miner-7` to get the list of arguments you were using, so you can restore them in step 3.
+
+3. **Start miner**: Start the miner by running the following command from the **SubVortex** directory
```bash
- pm2 restart miner-7
+ pm2 start neurons/miner.py -f \
+ --name miner-7 \
+ --interpreter python3 -- \
+ --netuid 7 \
+ --wallet.name $WALLET_NAME \
+ --wallet.hotkey $HOTKEY_NAME \
+ --logging.debug
```
-3. **Check logs**: Check the validator logs to see if you see some `New Block`
+ Replace **$WALLET_NAME** and **$HOTKEY_NAME** with the expected values.
+ If you were using other arguments, add them back as well.
+
+ > IMPORTANT
+ > Do not forget to remove the `--auto-update` argument.
+
+4. **Check logs**: Check the miner logs to confirm you see `New Block` entries
```bash
pm2 logs miner-7
```
diff --git a/scripts/setup_and_run.sh b/scripts/setup_and_run.sh
index 872e80be..3c62e12d 100755
--- a/scripts/setup_and_run.sh
+++ b/scripts/setup_and_run.sh
@@ -212,7 +212,8 @@ if [[ "$TYPE" == "miner" ]]; then
--subtensor.network local \
--wallet.name $WALLET_NAME \
--wallet.hotkey $HOTKEY_NAME \
- --logging.debug
+ --logging.debug \
+ --auto-update
fi
# Run validator
@@ -239,6 +240,7 @@ if [[ "$TYPE" == "validator" ]]; then
--netuid $NETUID \
--wallet.name $WALLET_NAME \
--wallet.hotkey $HOTKEY_NAME \
- --logging.debug \
+ --logging.debug \
+ --auto-update \
$OPTIONS
fi
diff --git a/scripts/subnet/README.md b/scripts/subnet/README.md
index 53697522..565067dc 100644
--- a/scripts/subnet/README.md
+++ b/scripts/subnet/README.md
@@ -72,7 +72,7 @@ To upgrade the Subnet manually, you can use the python script `subnet_upgrade.py`
For example, if you are on tag v2.2.2 and want to migrate to the tag v2.2.3, you can run in `SubVortex`
```
-python3 ./scripts/subnet/subnet_upgrade.py --tag v2.2.3
+python3 ./scripts/subnet/utils/subnet_upgrade.py --tag v2.2.3
```
## Downgrade
@@ -82,5 +82,5 @@ To downgrade the Subnet manually, you can use the python script `subnet_upgrade.
For example, if you are on tag v2.2.3 and want to downgrade to the tag v2.2.2, you can run in `SubVortex`
```
-python3 ./scripts/subnet/subnet_upgrade.py --tag v2.2.2
+python3 ./scripts/subnet/utils/subnet_upgrade.py --tag v2.2.2
```
diff --git a/subnet/miner/run.py b/subnet/miner/run.py
index ad444ac4..ac3f1d5a 100644
--- a/subnet/miner/run.py
+++ b/subnet/miner/run.py
@@ -7,6 +7,7 @@
from subnet.miner.version import VersionControl
+
def run(self):
"""
Initiates and manages the main loop for the miner on the Bittensor network.
@@ -41,7 +42,7 @@ def run(self):
netuid = self.config.netuid
- version_control = VersionControl()
+ self.version_control = VersionControl()
# Keep a track of last upgrade check
self.last_upgrade_check = 0
@@ -63,11 +64,11 @@ def handler(obj, update_nr, subscription_id):
if should_upgrade(self.config.auto_update, self.last_upgrade_check):
bt.logging.debug("Checking upgrade")
- must_restart = version_control.upgrade()
+ must_restart = self.version_control.upgrade()
if must_restart:
self.version_control.restart()
return
-
+
self.last_upgrade_check = time.time()
bt.logging.debug(
@@ -77,4 +78,4 @@ def handler(obj, update_nr, subscription_id):
if self.should_exit:
return True
- block_handler_substrate.subscribe_block_headers(handler)
\ No newline at end of file
+ block_handler_substrate.subscribe_block_headers(handler)
diff --git a/subnet/shared/utils.py b/subnet/shared/utils.py
index 0da04823..0f5007a0 100644
--- a/subnet/shared/utils.py
+++ b/subnet/shared/utils.py
@@ -20,6 +20,10 @@
import subprocess
import bittensor as bt
+# Check for an update every 5 minutes
+# GitHub rate limit: 60 requests per hour per IP (Reference: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?apiVersion=2022-11-28)
+CHECK_UPDATE_FREQUENCY = 5 * 60
+
def get_redis_password(
redis_password: str = None, redis_conf: str = "/etc/redis/redis.conf"
@@ -48,8 +52,7 @@ def get_redis_password(
def should_upgrade(auto_update: bool, last_upgrade_check: float):
"""
- True if it is sime to upgrade, false otherwise
- For now, upgrading evering 60 seconds
+ True if it is time to upgrade, false otherwise
"""
time_since_last_update = time.time() - last_upgrade_check
- return time_since_last_update >= 60 and auto_update
+ return time_since_last_update >= CHECK_UPDATE_FREQUENCY and auto_update
diff --git a/subnet/validator/config.py b/subnet/validator/config.py
index 12aee69b..c5c2e335 100644
--- a/subnet/validator/config.py
+++ b/subnet/validator/config.py
@@ -213,6 +213,12 @@ def add_args(cls, parser):
help="Redis configuration path.",
default="/etc/redis/redis.conf",
)
+ parser.add_argument(
+ "--database.redis_dump_path",
+ type=str,
+ help="Directory where the Redis dumps are stored.",
+ default="/etc/redis/",
+ )
# Auto update
parser.add_argument(
diff --git a/subnet/validator/version.py b/subnet/validator/version.py
index 0c9a974b..9dc12b4e 100644
--- a/subnet/validator/version.py
+++ b/subnet/validator/version.py
@@ -11,11 +11,13 @@
set_version,
)
+LAST_VERSION_BEFORE_AUTO_UPDATE = "2.2.0"
+
class VersionControl(BaseVersionControl):
- def __init__(self, database):
+ def __init__(self, database, dump_path: str):
super().__init__()
- self.redis = Redis(database)
+ self.redis = Redis(database, dump_path)
def restart(self):
bt.logging.info(f"Restarting validator...")
@@ -39,7 +41,7 @@ async def upgrade_redis(self):
# Get the local version
active_version = await self.redis.get_version()
- local_version = active_version or remote_version
+ local_version = active_version or LAST_VERSION_BEFORE_AUTO_UPDATE
bt.logging.info(f"[Redis] Local version: {local_version}")
# Check if the subnet has to be upgraded
@@ -53,7 +55,8 @@ async def upgrade_redis(self):
self.must_restart = True
# Dump the database
- dump_name = f"redis-dump-{local_version}"
+ dump_path = self.redis.dump_path
+ dump_name = os.path.join(dump_path, f"redis-dump-{local_version}")
await create_dump(dump_name, self.redis.database)
bt.logging.info(f"[Redis] Dump {dump_name} created")
@@ -82,7 +85,8 @@ async def upgrade_redis(self):
remote_version, local_version
)
if not success_rollback:
- dump_name = f"redis-dump-{local_version}"
+ dump_path = self.redis.dump_path
+ dump_name = os.path.join(dump_path, f"redis-dump-{local_version}")
await restore_dump(dump_name, self.redis.database)
bt.logging.info(f"[Redis] Dump {dump_name} restored")
diff --git a/subnet/version/github_controller.py b/subnet/version/github_controller.py
index d40d78f0..ca6573f2 100644
--- a/subnet/version/github_controller.py
+++ b/subnet/version/github_controller.py
@@ -11,9 +11,10 @@
class Github:
- def __init__(self, repo_owner="eclipsevortex", repo_name="SubVortexVC"):
+ def __init__(self, repo_owner="eclipsevortex", repo_name="SubVortex"):
self.repo_owner = repo_owner
self.repo_name = repo_name
+ self.latest_version = None
def get_version(self) -> str:
with codecs.open(
@@ -28,14 +29,19 @@ def get_version(self) -> str:
def get_latest_version(self) -> str:
"""
Get the latest release on github
+ Return the cached value if any error occurs
"""
- url = f"https://api.github.com/repos/{self.repo_owner}/{self.repo_name}/releases/latest"
- response = requests.get(url)
- if response.status_code != 200:
- return None
+ try:
+ url = f"https://api.github.com/repos/{self.repo_owner}/{self.repo_name}/releases/latest"
+ response = requests.get(url)
+ if response.status_code != 200:
+ return self.latest_version
- latest_version = response.json()["tag_name"]
- return latest_version[1:]
+ latest_version = response.json()["tag_name"]
+ self.latest_version = latest_version[1:]
+ return self.latest_version
+ except Exception:
+ return self.latest_version
def get_branch(self, tag="latest"):
"""
diff --git a/subnet/version/redis_controller.py b/subnet/version/redis_controller.py
index 7f5c49e7..8bbdd880 100644
--- a/subnet/version/redis_controller.py
+++ b/subnet/version/redis_controller.py
@@ -8,15 +8,16 @@
class Redis:
- def __init__(self, database):
+ def __init__(self, database, dump_path: str):
self.database = database
+ self.dump_path = dump_path
async def get_version(self):
version = await _get_version(self.database)
return version
def get_latest_version(self):
- migration = get_migrations(True)
+ migration = get_migrations(force_new=True, reverse=True)
return migration[0][1] if len(migration) > 0 else None
async def rollout(self, from_version: str, to_version: str):
@@ -27,13 +28,9 @@ async def rollout(self, from_version: str, to_version: str):
lower_version = int(from_version.replace(".", ""))
# List all the migration to execute
- migration_scripts = get_migrations()
- migrations = [
- x
- for x in migration_scripts
- if x[0] > lower_version and x[0] <= upper_version
- ]
- migrations = sorted(migrations, key=lambda x: x[0])
+ migrations = get_migrations(
+ filter_lambda=lambda x: x[0] > lower_version and x[0] <= upper_version
+ )
version = None
try:
@@ -51,14 +48,13 @@ async def rollout(self, from_version: str, to_version: str):
# Rollout the migration
await module.rollout(self.database)
- # Update the version in the database
- new_version = await self.get_version()
- if new_version:
- bt.logging.success(f"[Redis] Rollout to {new_version} successful")
- else:
- bt.logging.success(f"[Redis] Rollout successful")
+ # Log to keep track
+ bt.logging.debug(f"[Redis] Rollout to {version} successful")
+
+ # Log the overall rollout result
+ bt.logging.success(f"[Redis] Rollout to {to_version} successful")
- return True
+ return True
except Exception as err:
bt.logging.error(f"[Redis] Failed to upgrade to {version}: {err}")
@@ -69,13 +65,10 @@ async def rollback(self, from_version: str, to_version: str = "0.0.0"):
lower_version = int(to_version.replace(".", ""))
# List all the migration to execute
- migration_scripts = get_migrations()
- migrations = [
- x
- for x in migration_scripts
- if x[0] > lower_version and x[0] <= upper_version
- ]
- migrations = sorted(migrations, key=lambda x: x[0])
+ migrations = get_migrations(
+ reverse=True,
+ filter_lambda=lambda x: x[0] > lower_version and x[0] <= upper_version,
+ )
version = None
try:
@@ -93,12 +86,14 @@ async def rollback(self, from_version: str, to_version: str = "0.0.0"):
# Rollback the migration
await module.rollback(self.database)
- # Update the version in the database
- new_version = await self.get_version()
- if new_version:
- bt.logging.success(f"[Redis] Rollback to {new_version} successful")
+ # Log to keep track
+ if version:
+ bt.logging.debug(f"[Redis] Rollback from {version} successful")
else:
- bt.logging.success(f"[Redis] Rollback successful")
+ bt.logging.debug("[Redis] Rollback successful")
+
+ # Log the overall rollback result
+ bt.logging.success(f"[Redis] Rollback to {to_version} successful")
return True
except Exception as err:
diff --git a/subnet/version/utils.py b/subnet/version/utils.py
index da804af5..6b46fa50 100644
--- a/subnet/version/utils.py
+++ b/subnet/version/utils.py
@@ -27,7 +27,7 @@ def extract_number(s):
return None
-def get_migrations(force_new=False):
+def get_migrations(force_new=False, reverse=False, filter_lambda=None):
"""
List all the migrations available
"""
@@ -55,8 +55,12 @@ def get_migrations(force_new=False):
(int(f"{major}{minor}{patch}"), f"{major}.{minor}.{patch}", file)
)
+ # Filter the migrations
+ if filter_lambda:
+ migrations = filter(filter_lambda, migrations)
+
# Sort migration per version
- migrations = sorted(migrations, key=lambda x: x[0], reverse=True)
+ migrations = sorted(migrations, key=lambda x: x[0], reverse=reverse)
except Exception as ex:
bt.logging.error(f"Could not load the migrations: {ex}")
diff --git a/tests/unit_tests/subnet/validator/test_validator_version_control.py b/tests/unit_tests/subnet/validator/test_validator_version_control.py
index 65dd4355..fd75d5ea 100644
--- a/tests/unit_tests/subnet/validator/test_validator_version_control.py
+++ b/tests/unit_tests/subnet/validator/test_validator_version_control.py
@@ -31,6 +31,7 @@ async def test_no_new_version_available_when_upgradring_should_do_nothing(
mock_redis_class.database = AsyncMock(aioredis.Redis)
mock_redis_class.get_version = AsyncMock(return_value="2.0.0")
mock_redis_class.get_latest_version.return_value = "2.0.0"
+ mock_redis_class.dump_path = "/etc/redis"
mock_redis.return_value = mock_redis_class
mock_github_class = MagicMock()
@@ -41,7 +42,9 @@ async def test_no_new_version_available_when_upgradring_should_do_nothing(
mock_interpreter_class = MagicMock()
mock_interpreter.return_value = mock_interpreter_class
- vc = VersionControl(mock_redis.database)
+ vc = VersionControl(
+ mock_redis.database, mock_redis_class.dump_path
+ )
# Act
must_restart = await vc.upgrade()
@@ -79,6 +82,7 @@ async def test_new_higher_validator_version_available_when_upgradring_should_upg
mock_redis_class.database = AsyncMock(aioredis.Redis)
mock_redis_class.get_version = AsyncMock(return_value="2.0.0")
mock_redis_class.get_latest_version.return_value = "2.0.0"
+ mock_redis_class.dump_path = "/etc/redis"
mock_redis.return_value = mock_redis_class
mock_github_class = MagicMock()
@@ -89,7 +93,9 @@ async def test_new_higher_validator_version_available_when_upgradring_should_upg
mock_interpreter_class = MagicMock()
mock_interpreter.return_value = mock_interpreter_class
- vc = VersionControl(mock_redis.database)
+ vc = VersionControl(
+ mock_redis.database, mock_redis_class.dump_path
+ )
# Act
must_restart = await vc.upgrade()
@@ -127,6 +133,7 @@ async def test_new_validator_higher_version_available_when_failing_upgrading_sho
mock_redis_class.database = AsyncMock(aioredis.Redis)
mock_redis_class.get_version = AsyncMock(return_value="2.0.0")
mock_redis_class.get_latest_version.return_value = "2.0.0"
+ mock_redis_class.dump_path = "/etc/redis"
mock_redis.return_value = mock_redis_class
mock_github_class = MagicMock()
@@ -141,7 +148,9 @@ async def test_new_validator_higher_version_available_when_failing_upgrading_sho
)
mock_interpreter.return_value = mock_interpreter_class
- vc = VersionControl(mock_redis.database)
+ vc = VersionControl(
+ mock_redis.database, mock_redis_class.dump_path
+ )
# Act
must_restart = await vc.upgrade()
@@ -179,6 +188,7 @@ async def test_current_validator_version_removed_when_upgradring_should_downgrad
mock_redis_class.database = AsyncMock(aioredis.Redis)
mock_redis_class.get_version = AsyncMock(return_value="2.0.0")
mock_redis_class.get_latest_version.return_value = "2.0.0"
+ mock_redis_class.dump_path = "/etc/redis"
mock_redis.return_value = mock_redis_class
mock_github_class = MagicMock()
@@ -189,7 +199,7 @@ async def test_current_validator_version_removed_when_upgradring_should_downgrad
mock_interpreter_class = MagicMock()
mock_interpreter.return_value = mock_interpreter_class
- vc = VersionControl(mock_redis.database)
+ vc = VersionControl(mock_redis.database, mock_redis_class.dump_path)
# Act
must_restart = await vc.upgrade()
@@ -226,6 +236,7 @@ async def test_new_higher_redis_version_available_when_upgradring_should_upgrade
mock_redis_class = MagicMock()
mock_redis_class.get_version = AsyncMock(return_value="2.0.0")
mock_redis_class.get_latest_version.return_value = "2.1.0"
+ mock_redis_class.dump_path = "/etc/redis"
mock_redis_class.rollout = AsyncMock()
mock_redis.return_value = mock_redis_class
@@ -239,7 +250,7 @@ async def test_new_higher_redis_version_available_when_upgradring_should_upgrade
mock_create_dump.return_value = AsyncMock()
- vc = VersionControl(mock_redis.database)
+ vc = VersionControl(mock_redis.database, mock_redis_class.dump_path)
# Act
must_restart = await vc.upgrade()
@@ -250,7 +261,7 @@ async def test_new_higher_redis_version_available_when_upgradring_should_upgrade
mock_redis_class.rollout.assert_called_once_with("2.0.0", "2.1.0")
mock_redis_class.rollback.assert_not_called()
mock_create_dump.assert_called_once_with(
- "redis-dump-2.0.0", mock_redis_class.database
+ "/etc/redis/redis-dump-2.0.0", mock_redis_class.database
)
mock_restore_dump.assert_not_called()
more_create_dump_migrations.assert_called_once()
@@ -278,6 +289,7 @@ async def test_new_higher_redis_version_available_when_failing_upgrading_should_
mock_redis_class = MagicMock()
mock_redis_class.get_version = AsyncMock(return_value="2.0.0")
mock_redis_class.get_latest_version.return_value = "2.1.0"
+ mock_redis_class.dump_path = "/etc/redis"
rollout_side_effect.called = False
mock_redis_class.rollout.side_effect = rollout_side_effect
mock_redis_class.rollback = AsyncMock()
@@ -293,7 +305,7 @@ async def test_new_higher_redis_version_available_when_failing_upgrading_should_
mock_create_dump.return_value = AsyncMock()
- vc = VersionControl(mock_redis.database)
+ vc = VersionControl(mock_redis.database, mock_redis_class.dump_path)
# Act
must_restart = await vc.upgrade()
@@ -304,7 +316,7 @@ async def test_new_higher_redis_version_available_when_failing_upgrading_should_
mock_redis_class.rollout.assert_called_once_with("2.0.0", "2.1.0")
mock_redis_class.rollback.assert_called_once_with("2.1.0", "2.0.0")
mock_create_dump.assert_called_once_with(
- "redis-dump-2.0.0", mock_redis_class.database
+ "/etc/redis/redis-dump-2.0.0", mock_redis_class.database
)
mock_restore_dump.assert_not_called()
more_create_dump_migrations.assert_called_once()
@@ -332,6 +344,7 @@ async def test_new_higher_redis_version_available_when_failing_upgrading_and_fai
mock_redis_class = MagicMock()
mock_redis_class.get_version = AsyncMock(return_value="2.0.0")
mock_redis_class.get_latest_version.return_value = "2.1.0"
+ mock_redis_class.dump_path = "/etc/redis"
rollout_side_effect.called = False
mock_redis_class.rollout.side_effect = rollout_side_effect
mock_redis_class.rollback = AsyncMock(return_value=False)
@@ -347,7 +360,7 @@ async def test_new_higher_redis_version_available_when_failing_upgrading_and_fai
mock_create_dump.return_value = AsyncMock()
- vc = VersionControl(mock_redis.database)
+ vc = VersionControl(mock_redis.database, mock_redis_class.dump_path)
# Act
must_restart = await vc.upgrade()
@@ -358,10 +371,10 @@ async def test_new_higher_redis_version_available_when_failing_upgrading_and_fai
mock_redis_class.rollout.assert_called_once_with("2.0.0", "2.1.0")
mock_redis_class.rollback.assert_called_once_with("2.1.0", "2.0.0")
mock_create_dump.assert_called_once_with(
- "redis-dump-2.0.0", mock_redis_class.database
+ "/etc/redis/redis-dump-2.0.0", mock_redis_class.database
)
mock_restore_dump.assert_called_once_with(
- "redis-dump-2.0.0", mock_redis_class.database
+ "/etc/redis/redis-dump-2.0.0", mock_redis_class.database
)
more_create_dump_migrations.assert_called_once()
more_remove_dump_migrations.assert_has_calls([call(), call()])
@@ -389,6 +402,7 @@ async def test_current_redis_version_removed_when_upgradring_should_downgrade_re
mock_redis_class.database = AsyncMock(aioredis.Redis)
mock_redis_class.get_version = AsyncMock(return_value="2.1.0")
mock_redis_class.get_latest_version.return_value = "2.0.0"
+ mock_redis_class.dump_path = "/etc/redis"
mock_redis_class.rollout = AsyncMock()
mock_redis_class.rollback = AsyncMock()
mock_redis.return_value = mock_redis_class
@@ -403,7 +417,7 @@ async def test_current_redis_version_removed_when_upgradring_should_downgrade_re
mock_create_dump.return_value = AsyncMock()
- vc = VersionControl(mock_redis.database)
+ vc = VersionControl(mock_redis.database, mock_redis_class.dump_path)
# Act
must_restart = await vc.upgrade()
@@ -414,7 +428,7 @@ async def test_current_redis_version_removed_when_upgradring_should_downgrade_re
mock_redis_class.rollout.assert_not_called()
mock_redis_class.rollback.assert_called_once_with("2.1.0", "2.0.0")
mock_create_dump.assert_called_once_with(
- "redis-dump-2.1.0", mock_redis_class.database
+ "/etc/redis/redis-dump-2.1.0", mock_redis_class.database
)
mock_restore_dump.assert_not_called()
more_create_dump_migrations.assert_called_once()
@@ -442,6 +456,7 @@ async def test_new_higher_miner_and_redis_version_available_when_upgradring_shou
mock_redis_class = MagicMock()
mock_redis_class.get_version = AsyncMock(return_value="2.0.0")
mock_redis_class.get_latest_version.return_value = "2.1.0"
+ mock_redis_class.dump_path = "/etc/redis"
mock_redis_class.rollout = AsyncMock()
mock_redis.return_value = mock_redis_class
@@ -455,7 +470,7 @@ async def test_new_higher_miner_and_redis_version_available_when_upgradring_shou
mock_create_dump.return_value = AsyncMock()
- vc = VersionControl(mock_redis.database)
+ vc = VersionControl(mock_redis.database, mock_redis_class.dump_path)
# Act
must_restart = await vc.upgrade()
@@ -465,7 +480,7 @@ async def test_new_higher_miner_and_redis_version_available_when_upgradring_shou
mock_interpreter_class.upgrade_dependencies.assert_called_once()
mock_redis_class.rollout.assert_called_once_with("2.0.0", "2.1.0")
mock_create_dump.assert_called_once_with(
- "redis-dump-2.0.0", mock_redis_class.database
+ "/etc/redis/redis-dump-2.0.0", mock_redis_class.database
)
mock_restore_dump.assert_not_called()
more_create_dump_migrations.assert_called_once()
@@ -493,6 +508,7 @@ async def test_new_higher_miner_and_redis_version_available_when_validator_uprad
mock_redis_class = MagicMock()
mock_redis_class.get_version = AsyncMock(return_value="2.0.0")
mock_redis_class.get_latest_version.return_value = "2.1.0"
+ mock_redis_class.dump_path = "/etc/redis"
mock_redis_class.rollout = AsyncMock()
rollout_side_effect.called = False
mock_redis_class.rollout.side_effect = rollout_side_effect
@@ -509,7 +525,7 @@ async def test_new_higher_miner_and_redis_version_available_when_validator_uprad
mock_create_dump.return_value = AsyncMock()
- vc = VersionControl(mock_redis.database)
+ vc = VersionControl(mock_redis.database, mock_redis_class.dump_path)
# Act
must_restart = await vc.upgrade()
@@ -520,7 +536,7 @@ async def test_new_higher_miner_and_redis_version_available_when_validator_uprad
mock_redis_class.rollout.assert_called_once_with("2.0.0", "2.1.0")
mock_redis_class.rollback.assert_called_once_with("2.1.0", "2.0.0")
mock_create_dump.assert_called_once_with(
- "redis-dump-2.0.0", mock_redis_class.database
+ "/etc/redis/redis-dump-2.0.0", mock_redis_class.database
)
mock_restore_dump.assert_not_called()
more_create_dump_migrations.assert_called_once()
diff --git a/tests/unit_tests/subnet/version/test_get_latest_version.py b/tests/unit_tests/subnet/version/test_get_latest_version.py
new file mode 100644
index 00000000..e04e5697
--- /dev/null
+++ b/tests/unit_tests/subnet/version/test_get_latest_version.py
@@ -0,0 +1,99 @@
+import unittest
+from unittest.mock import patch
+
+from subnet.version.github_controller import Github
+
+
+class TestGithubController(unittest.IsolatedAsyncioTestCase):
+ @patch("requests.get")
+ @patch("codecs.open")
+ def test_request_latest_successful_should_return_the_latest_version(
+ self, mock_open, mock_request
+ ):
+ # Arrange
+ mock_open.return_value.__enter__.return_value.read.return_value = ""
+
+ mock_request.return_value.status_code = 200
+ mock_request.return_value.json.return_value = {"tag_name": "v2.2.3"}
+
+ github = Github()
+
+ # Act
+ result = github.get_latest_version()
+
+ # Assert
+ assert "2.2.3" == result
+
+ @patch("requests.get")
+ @patch("codecs.open")
+ def test_request_latest_failed_and_no_cached_version_exist_should_return_none(
+ self, mock_open, mock_request
+ ):
+ # Arrange
+ mock_open.return_value.__enter__.return_value.read.return_value = ""
+
+ mock_request.return_value.status_code = 300
+ mock_request.return_value.json.return_value = {"tag_name": "v2.2.3"}
+
+ github = Github()
+
+ # Act
+ result = github.get_latest_version()
+
+ # Assert
+ assert result is None
+
+ @patch("requests.get")
+ @patch("codecs.open")
+ def test_request_latest_failed_and_a_cached_version_exist_should_return_the_cached_version(
+ self, mock_open, mock_request
+ ):
+ # Arrange
+ mock_open.return_value.__enter__.return_value.read.return_value = ""
+
+ mock_request.return_value.status_code = 200
+ mock_request.return_value.json.return_value = {"tag_name": "v2.2.3"}
+
+ github = Github()
+
+ # Act
+ result1 = github.get_latest_version()
+
+ # Assert
+ assert "2.2.3" == result1
+
+ # Arrange
+ mock_request.return_value.status_code = 300
+ mock_request.return_value.json.return_value = {"tag_name": "v2.2.4"}
+
+ # Act
+ result2 = github.get_latest_version()
+
+ assert "2.2.3" == result2
+
+ @patch("requests.get")
+ @patch("codecs.open")
+ def test_request_latest_throw_exception_and_a_cached_version_exist_should_return_the_cached_version(
+ self, mock_open, mock_request
+ ):
+ # Arrange
+ mock_open.return_value.__enter__.return_value.read.return_value = ""
+
+ mock_request.return_value.status_code = 200
+ mock_request.return_value.json.return_value = {"tag_name": "v2.2.3"}
+
+ github = Github()
+
+ # Act
+ result1 = github.get_latest_version()
+
+ # Assert
+ assert "2.2.3" == result1
+
+ # Arrange
+ mock_request.return_value.json.side_effect = ValueError("Simulated error")
+
+ # Act
+ result2 = github.get_latest_version()
+
+ assert "2.2.3" == result2
diff --git a/tests/unit_tests/subnet/version/test_get_migrations.py b/tests/unit_tests/subnet/version/test_get_migrations.py
index c0471602..f390e707 100644
--- a/tests/unit_tests/subnet/version/test_get_migrations.py
+++ b/tests/unit_tests/subnet/version/test_get_migrations.py
@@ -6,7 +6,7 @@
class TestUtilVersionControl(unittest.IsolatedAsyncioTestCase):
@patch("os.listdir")
- async def test_no_migration_available_should_return_an_empty_list(
+ async def test_forward_order_with_no_migration_available_should_return_an_empty_list(
self, mock_listdir
):
# Arrange
@@ -19,7 +19,7 @@ async def test_no_migration_available_should_return_an_empty_list(
assert 0 == len(result)
@patch("os.listdir")
- async def test_migration_available_should_return_a_list_in_the_right_order(
+ async def test_forward_order_with_migration_available_should_return_a_list_in_the_right_order(
self, mock_listdir
):
# Arrange
@@ -32,6 +32,75 @@ async def test_migration_available_should_return_a_list_in_the_right_order(
# Act
result = get_migrations()
+ # Assert
+ assert 3 == len(result)
+ assert (200, "2.0.0", "migration-2.0.0.py") == result[0]
+ assert (210, "2.1.0", "migration-2.1.0.py") == result[1]
+ assert (211, "2.1.1", "migration-2.1.1.py") == result[2]
+
+ @patch("os.listdir")
+ async def test_forward_order_with_migration_available_and_filter_applied_should_return_a_list_in_the_right_order(
+ self, mock_listdir
+ ):
+ # Arrange
+ mock_listdir.return_value = [
+ "migration-2.1.0.py",
+ "migration-2.0.0.py",
+ "migration-2.1.1.py",
+ ]
+
+ # Act
+ result = get_migrations(filter_lambda=lambda x: x[0] > 200 and x[0] <= 211)
+
+ # Assert
+ assert 2 == len(result)
+ assert (210, "2.1.0", "migration-2.1.0.py") == result[0]
+ assert (211, "2.1.1", "migration-2.1.1.py") == result[1]
+
+ @patch("os.listdir")
+ async def test_forward_order_with_migration_available_when_few_does_match_the_expected_pattern_should_return_a_list_without_these_wrong_formatted_files(
+ self, mock_listdir
+ ):
+ # Arrange
+ mock_listdir.return_value = [
+ "migration2.1.1.py",
+ "migrations-2.0.0.py",
+ "migration-210.py",
+ ]
+
+ # Act
+ result = get_migrations()
+
+ # Assert
+ assert 0 == len(result)
+
+ @patch("os.listdir")
+ async def test_reverse_order_with_no_migration_available_should_return_an_empty_list(
+ self, mock_listdir
+ ):
+ # Arrange
+ mock_listdir.return_value = []
+
+ # Act
+ result = get_migrations(reverse=True)
+
+ # Assert
+ assert 0 == len(result)
+
+ @patch("os.listdir")
+ async def test_reverse_order_with_migration_available_should_return_a_list_in_the_right_order(
+ self, mock_listdir
+ ):
+ # Arrange
+ mock_listdir.return_value = [
+ "migration-2.1.0.py",
+ "migration-2.0.0.py",
+ "migration-2.1.1.py",
+ ]
+
+ # Act
+ result = get_migrations(reverse=True)
+
# Assert
assert 3 == len(result)
assert (211, "2.1.1", "migration-2.1.1.py") == result[0]
@@ -39,7 +108,28 @@ async def test_migration_available_should_return_a_list_in_the_right_order(
assert (200, "2.0.0", "migration-2.0.0.py") == result[2]
@patch("os.listdir")
- async def test_migration_available_when_few_does_match_the_expected_pattern_should_return_a_list_without_these_wrong_formatted_files(
+ async def test_reverse_order_with_migration_available_and_filter_applied_should_return_a_list_in_the_right_order(
+ self, mock_listdir
+ ):
+ # Arrange
+ mock_listdir.return_value = [
+ "migration-2.1.0.py",
+ "migration-2.0.0.py",
+ "migration-2.1.1.py",
+ ]
+
+ # Act
+ result = get_migrations(
+ reverse=True, filter_lambda=lambda x: x[0] > 200 and x[0] <= 211
+ )
+
+ # Assert
+ assert 2 == len(result)
+ assert (211, "2.1.1", "migration-2.1.1.py") == result[0]
+ assert (210, "2.1.0", "migration-2.1.0.py") == result[1]
+
+ @patch("os.listdir")
+ async def test_reverse_order_with_migration_available_when_few_does_match_the_expected_pattern_should_return_a_list_without_these_wrong_formatted_files(
self, mock_listdir
):
# Arrange
@@ -50,7 +140,7 @@ async def test_migration_available_when_few_does_match_the_expected_pattern_shou
]
# Act
- result = get_migrations()
+ result = get_migrations(reverse=True)
# Assert
assert 0 == len(result)