Skip to content

Commit

Permalink
remove refs to installed_redisai_backends
Browse files Browse the repository at this point in the history
  • Loading branch information
ankona committed Oct 24, 2024
1 parent 9f3a68f commit 629baa5
Show file tree
Hide file tree
Showing 10 changed files with 29 additions and 34 deletions.
5 changes: 3 additions & 2 deletions doc/tutorials/ml_inference/Inference-in-SmartSim.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,9 @@
],
"source": [
"## Installing the ML backends\n",
"from smartsim._core.utils.helpers import installed_redisai_backends\n",
"print(installed_redisai_backends())\n"
"# from smartsim._core.utils.helpers import installed_redisai_backends\n",
"#print(installed_redisai_backends())\n",
"# TODO: replace deprecated installed_redisai_backends"
]
},
{
Expand Down
3 changes: 1 addition & 2 deletions tests/_legacy/backends/test_cli_mini_exp.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
import smartsim._core._cli.validate
import smartsim._core._install.builder as build
from smartsim._core._install.platform import Device
from smartsim._core.utils.helpers import installed_redisai_backends

sklearn_available = True
try:
Expand Down Expand Up @@ -71,7 +70,7 @@ def _mock_make_managed_local_feature_store(*a, **kw):
"_make_managed_local_feature_store",
_mock_make_managed_local_feature_store,
)
backends = installed_redisai_backends()
backends = [] # todo: update test to replace installed_redisai_backends()
(fs_port,) = fs.ports

smartsim._core._cli.validate.test_install(
Expand Down
9 changes: 6 additions & 3 deletions tests/_legacy/backends/test_dbmodel.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import pytest

from smartsim import Experiment
from smartsim._core.utils import installed_redisai_backends
from smartsim.entity import Ensemble
from smartsim.entity.dbobject import FSModel
from smartsim.error.errors import SSUnsupportedError
Expand Down Expand Up @@ -70,7 +69,9 @@ def call(self, x):
except:
logger.warning("Could not set TF max memory limit for GPU")

should_run_tf &= "tensorflow" in installed_redisai_backends()
should_run_tf &= (
"tensorflow" in []
) # todo: update test to replace installed_redisai_backends()

# Check if PyTorch is available for tests
try:
Expand Down Expand Up @@ -107,7 +108,9 @@ def forward(self, x):
return output


should_run_pt &= "torch" in installed_redisai_backends()
should_run_pt &= (
"torch" in []
) # todo: update test to replace installed_redisai_backends()


def save_tf_cnn(path, file_name):
Expand Down
5 changes: 1 addition & 4 deletions tests/_legacy/backends/test_dbscript.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,18 +24,15 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import os
import sys

import pytest
from smartredis import *

from smartsim import Experiment
from smartsim._core.utils import installed_redisai_backends
from smartsim.entity.dbobject import FSScript
from smartsim.error.errors import SSUnsupportedError
from smartsim.log import get_logger
from smartsim.settings import MpiexecSettings, MpirunSettings
from smartsim.status import JobStatus

logger = get_logger(__name__)
Expand All @@ -49,7 +46,7 @@
except ImportError:
should_run = False

should_run &= "torch" in installed_redisai_backends()
should_run &= "torch" in [] # todo: update test to replace installed_redisai_backends()


def timestwo(x):
Expand Down
6 changes: 3 additions & 3 deletions tests/_legacy/backends/test_onnx.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,6 @@

import pytest

from smartsim import Experiment
from smartsim._core.utils import installed_redisai_backends
from smartsim.status import JobStatus

sklearn_available = True
Expand All @@ -47,7 +45,9 @@
sklearn_available = False


onnx_backend_available = "onnxruntime" in installed_redisai_backends()
onnx_backend_available = (
"onnxruntime" in []
) # todo: update test to replace installed_redisai_backends()

should_run = sklearn_available and onnx_backend_available

Expand Down
6 changes: 3 additions & 3 deletions tests/_legacy/backends/test_tf.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,6 @@

import pytest

from smartsim import Experiment
from smartsim._core.utils import installed_redisai_backends
from smartsim.error import SmartSimError
from smartsim.status import JobStatus

Expand All @@ -43,7 +41,9 @@
print(e)
tf_available = False

tf_backend_available = "tensorflow" in installed_redisai_backends()
tf_backend_available = (
"tensorflow" in []
) # todo: update test to replace installed_redisai_backends()


@pytest.mark.skipif(
Expand Down
6 changes: 3 additions & 3 deletions tests/_legacy/backends/test_torch.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,6 @@

import pytest

from smartsim import Experiment
from smartsim._core.utils import installed_redisai_backends
from smartsim.status import JobStatus

torch_available = True
Expand All @@ -40,7 +38,9 @@
except ImportError:
torch_available = False

torch_backend_available = "torch" in installed_redisai_backends()
torch_backend_available = (
"torch" in []
) # todo: update test to replace installed_redisai_backends()

should_run = torch_available and torch_backend_available
pytestmark = pytest.mark.skipif(
Expand Down
7 changes: 3 additions & 4 deletions tests/_legacy/test_smartredis.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,7 @@

import pytest

from smartsim import Experiment
from smartsim._core.utils import installed_redisai_backends
from smartsim.builders import Ensemble
from smartsim.database import FeatureStore
from smartsim.entity import Application
from smartsim.status import JobStatus

Expand All @@ -51,7 +48,9 @@
except ImportError:
shouldrun = False

torch_available = "torch" in installed_redisai_backends()
torch_available = (
"torch" in []
) # todo: update test to replace installed_redisai_backends()

shouldrun &= torch_available

Expand Down
6 changes: 3 additions & 3 deletions tests/dragon_wlm/test_core_machine_learning_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,10 +39,8 @@
InferenceRequest,
MachineLearningWorkerCore,
RequestBatch,
TransformInputResult,
TransformOutputResult,
)
from smartsim._core.utils import installed_redisai_backends

from .feature_store import FileSystemFeatureStore, MemoryFeatureStore

Expand All @@ -53,7 +51,9 @@
is_dragon = (
pytest.test_launcher == "dragon" if hasattr(pytest, "test_launcher") else False
)
torch_available = "torch" in installed_redisai_backends()
torch_available = (
"torch" in []
) # todo: update test to replace installed_redisai_backends()


@pytest.fixture
Expand Down
10 changes: 3 additions & 7 deletions tests/mli/test_integrated_torch_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,22 +25,18 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pathlib
import typing as t

import pytest
import torch

# import smartsim.error as sse
# from smartsim._core.mli.infrastructure.control import workermanager as mli
# from smartsim._core.mli.message_handler import MessageHandler
from smartsim._core.utils import installed_redisai_backends

# The tests in this file belong to the group_b group
pytestmark = pytest.mark.group_b

# retrieved from pytest fixtures
is_dragon = pytest.test_launcher == "dragon"
torch_available = "torch" in installed_redisai_backends()
torch_available = (
"torch" in []
) # todo: update test to replace installed_redisai_backends()


@pytest.fixture
Expand Down

0 comments on commit 629baa5

Please sign in to comment.